Diffstat (limited to 'video_engine')
-rw-r--r--  video_engine/encoder_state_feedback_unittest.cc    21
-rw-r--r--  video_engine/include/vie_base.h                     32
-rw-r--r--  video_engine/mock/mock_vie_frame_provider_base.h    33
-rw-r--r--  video_engine/overuse_frame_detector.cc              28
-rw-r--r--  video_engine/overuse_frame_detector.h               36
-rw-r--r--  video_engine/overuse_frame_detector_unittest.cc     67
-rw-r--r--  video_engine/video_engine_core.gypi                  1
-rw-r--r--  video_engine/vie_base_impl.cc                       33
-rw-r--r--  video_engine/vie_base_impl.h                         2
-rw-r--r--  video_engine/vie_capturer.cc                        50
-rw-r--r--  video_engine/vie_capturer.h                         10
-rw-r--r--  video_engine/vie_capturer_unittest.cc              263
-rw-r--r--  video_engine/vie_encoder.cc                          4
-rw-r--r--  video_engine/vie_remb_unittest.cc                   17
14 files changed, 474 insertions, 123 deletions
diff --git a/video_engine/encoder_state_feedback_unittest.cc b/video_engine/encoder_state_feedback_unittest.cc
index f85d9897..4e15752f 100644
--- a/video_engine/encoder_state_feedback_unittest.cc
+++ b/video_engine/encoder_state_feedback_unittest.cc
@@ -17,26 +17,17 @@
#include "webrtc/common.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/video_engine/vie_encoder.h"
-namespace webrtc {
+using ::testing::NiceMock;
-// TODO(mflodman) Create a common mock in module utility.
-class TestProcessThread : public ProcessThread {
- public:
- TestProcessThread() {}
- ~TestProcessThread() {}
- virtual int32_t Start() { return 0; }
- virtual int32_t Stop() { return 0; }
- virtual int32_t RegisterModule(Module* module) { return 0; }
- virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
+namespace webrtc {
class MockVieEncoder : public ViEEncoder {
public:
- explicit MockVieEncoder(TestProcessThread* process_thread)
+ explicit MockVieEncoder(ProcessThread* process_thread)
: ViEEncoder(1, 1, 1, config_, *process_thread, NULL) {}
~MockVieEncoder() {}
@@ -55,10 +46,10 @@ class MockVieEncoder : public ViEEncoder {
class VieKeyRequestTest : public ::testing::Test {
protected:
virtual void SetUp() {
- process_thread_.reset(new TestProcessThread());
+ process_thread_.reset(new NiceMock<MockProcessThread>);
encoder_state_feedback_.reset(new EncoderStateFeedback());
}
- scoped_ptr<TestProcessThread> process_thread_;
+ scoped_ptr<MockProcessThread> process_thread_;
scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
};
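
The shared mock that replaces the hand-rolled TestProcessThread lives in modules/utility/interface/mock/mock_process_thread.h, which is not part of this diff. A minimal sketch of its presumable shape, assuming it simply mocks the same four ProcessThread methods the deleted stub implemented:

#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/modules/utility/interface/process_thread.h"

namespace webrtc {

class MockProcessThread : public ProcessThread {
 public:
  MOCK_METHOD0(Start, int32_t());
  MOCK_METHOD0(Stop, int32_t());
  MOCK_METHOD1(RegisterModule, int32_t(Module* module));
  MOCK_METHOD1(DeRegisterModule, int32_t(const Module* module));
};

}  // namespace webrtc

Wrapping it in NiceMock<> (as SetUp() does above) silences gmock warnings about uninteresting calls, e.g. RegisterModule() invoked by the code under test.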
diff --git a/video_engine/include/vie_base.h b/video_engine/include/vie_base.h
index a08cbacb..56916ce3 100644
--- a/video_engine/include/vie_base.h
+++ b/video_engine/include/vie_base.h
@@ -109,6 +109,24 @@ struct CpuOveruseOptions {
}
};
+struct CpuOveruseMetrics {
+ CpuOveruseMetrics()
+ : capture_jitter_ms(-1),
+ avg_encode_time_ms(-1),
+ encode_usage_percent(-1),
+ capture_queue_delay_ms_per_s(-1) {}
+
+ int capture_jitter_ms; // The current estimated jitter in ms based on
+ // incoming captured frames.
+ int avg_encode_time_ms; // The average encode time in ms.
+ int encode_usage_percent; // The average encode time divided by the average
+ // time difference between incoming captured frames.
+ int capture_queue_delay_ms_per_s; // The current delay from when a frame is
+ // captured until it starts being
+ // processed, expressed in ms of delay
+ // per second.
+};
+
class WEBRTC_DLLEXPORT VideoEngine {
public:
// Creates a VideoEngine object, which can then be used to acquire sub-APIs.
@@ -193,16 +211,12 @@ class WEBRTC_DLLEXPORT ViEBase {
}
// Gets cpu overuse measures.
- // capture_jitter_ms: The current estimated jitter in ms based on incoming
- // captured frames.
- // avg_encode_time_ms: The average encode time in ms.
- // encode_usage_percent: The average encode time divided by the average time
- // difference between incoming captured frames.
- // capture_queue_delay_ms_per_s: The current time delay between an incoming
- // captured frame until the frame is being
- // processed. The delay is expressed in ms
- // delay per second.
// TODO(asapersson): Remove default implementation.
+ virtual int GetCpuOveruseMetrics(int channel,
+ CpuOveruseMetrics* metrics) {
+ return -1;
+ }
+ // TODO(asapersson): Remove this function when libjingle has been updated.
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
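
With the struct in place, a caller reads all four measures through a single call instead of four out-parameters. A minimal usage sketch, assuming |base| and |channel| come from the usual VideoEngine/ViEBase setup (omitted here):

#include <cstdio>

#include "webrtc/video_engine/include/vie_base.h"

// Sketch only: prints the consolidated CPU overuse metrics for a channel.
void PrintCpuOveruseMetrics(webrtc::ViEBase* base, int channel) {
  webrtc::CpuOveruseMetrics metrics;
  if (base->GetCpuOveruseMetrics(channel, &metrics) == 0) {
    printf("jitter=%d ms, avg encode=%d ms, usage=%d%%, queue delay=%d ms/s\n",
           metrics.capture_jitter_ms,
           metrics.avg_encode_time_ms,
           metrics.encode_usage_percent,
           metrics.capture_queue_delay_ms_per_s);
  }
}

Since the struct constructor initializes every field to -1, a caller can also tell "no data yet" apart from a measured zero.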
diff --git a/video_engine/mock/mock_vie_frame_provider_base.h b/video_engine/mock/mock_vie_frame_provider_base.h
new file mode 100644
index 00000000..d4e17f4d
--- /dev/null
+++ b/video_engine/mock/mock_vie_frame_provider_base.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+
+#include "webrtc/video_engine/vie_frame_provider_base.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockViEFrameCallback : public ViEFrameCallback {
+ public:
+ MOCK_METHOD4(DeliverFrame,
+ void(int id,
+ I420VideoFrame* video_frame,
+ int num_csrcs,
+ const uint32_t CSRC[kRtpCsrcSize]));
+ MOCK_METHOD2(DelayChanged, void(int id, int frame_delay));
+ MOCK_METHOD3(GetPreferedFrameSettings,
+ int(int* width, int* height, int* frame_rate));
+ MOCK_METHOD1(ProviderDestroyed, void(int id));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
diff --git a/video_engine/overuse_frame_detector.cc b/video_engine/overuse_frame_detector.cc
index 078c89a5..c136130b 100644
--- a/video_engine/overuse_frame_detector.cc
+++ b/video_engine/overuse_frame_detector.cc
@@ -302,29 +302,19 @@ void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {
ResetAll(num_pixels_);
}
-int OveruseFrameDetector::CaptureJitterMs() const {
- CriticalSectionScoped cs(crit_.get());
- return static_cast<int>(capture_deltas_.StdDev() + 0.5);
-}
-
-int OveruseFrameDetector::AvgEncodeTimeMs() const {
- CriticalSectionScoped cs(crit_.get());
- return encode_time_->filtered_encode_time_ms();
-}
-
-int OveruseFrameDetector::EncodeUsagePercent() const {
- CriticalSectionScoped cs(crit_.get());
- return encode_usage_->UsageInPercent();
-}
-
-int OveruseFrameDetector::AvgCaptureQueueDelayMsPerS() const {
+int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
CriticalSectionScoped cs(crit_.get());
- return capture_queue_delay_->filtered_delay_ms_per_s();
+ return capture_queue_delay_->delay_ms();
}
-int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
+void OveruseFrameDetector::GetCpuOveruseMetrics(
+ CpuOveruseMetrics* metrics) const {
CriticalSectionScoped cs(crit_.get());
- return capture_queue_delay_->delay_ms();
+ metrics->capture_jitter_ms = static_cast<int>(capture_deltas_.StdDev() + 0.5);
+ metrics->avg_encode_time_ms = encode_time_->filtered_encode_time_ms();
+ metrics->encode_usage_percent = encode_usage_->UsageInPercent();
+ metrics->capture_queue_delay_ms_per_s =
+ capture_queue_delay_->filtered_delay_ms_per_s();
}
int32_t OveruseFrameDetector::TimeUntilNextProcess() {
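
Each of the removed accessors took the critical section separately, so four consecutive calls could interleave with updates; the consolidated getter takes the lock once and returns a mutually consistent snapshot. A simplified, self-contained illustration of that pattern (plain C++11, not the actual WebRTC classes):

#include <mutex>

struct Metrics {
  int jitter_ms;
  int encode_time_ms;
};

class Detector {
 public:
  // Single lock acquisition: both fields are read together, so a writer
  // cannot update one of them between the two reads.
  void GetMetrics(Metrics* out) const {
    std::lock_guard<std::mutex> lock(mutex_);
    out->jitter_ms = jitter_ms_;
    out->encode_time_ms = encode_time_ms_;
  }

 private:
  mutable std::mutex mutex_;
  int jitter_ms_ = 0;
  int encode_time_ms_ = 0;
};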
diff --git a/video_engine/overuse_frame_detector.h b/video_engine/overuse_frame_detector.h
index c30bb57d..38b927ba 100644
--- a/video_engine/overuse_frame_detector.h
+++ b/video_engine/overuse_frame_detector.h
@@ -70,25 +70,23 @@ class OveruseFrameDetector : public Module {
void FrameEncoded(int encode_time_ms);
// Accessors.
- // The estimated jitter based on incoming captured frames.
- int CaptureJitterMs() const;
-
- // Running average of reported encode time (FrameEncoded()).
- // Only used for stats.
- int AvgEncodeTimeMs() const;
-
- // The average encode time divided by the average time difference between
- // incoming captured frames.
- // This variable is currently only used for statistics.
- int EncodeUsagePercent() const;
-
- // The current time delay between an incoming captured frame (FrameCaptured())
- // until the frame is being processed (FrameProcessingStarted()).
- // (Note: if a new frame is received before an old frame has been processed,
- // the old frame is skipped).
- // The delay is returned as the delay in ms per second.
- // This variable is currently only used for statistics.
- int AvgCaptureQueueDelayMsPerS() const;
+
+ // Returns CpuOveruseMetrics where
+ // capture_jitter_ms: The estimated jitter based on incoming captured frames.
+ // avg_encode_time_ms: Running average of reported encode time
+ // (FrameEncoded()). Only used for stats.
+ // encode_usage_percent: The average encode time divided by the average time
+ // difference between incoming captured frames.
+ // capture_queue_delay_ms_per_s: The current delay from when a frame is
+ // captured (FrameCaptured()) until it starts
+ // being processed (FrameProcessingStarted()).
+ // (Note: if a new frame arrives before an old
+ // frame has been processed, the old frame is
+ // skipped.) The delay is expressed in ms of
+ // delay per second.
+ // Only used for stats.
+ void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
+
int CaptureQueueDelayMsPerS() const;
// Implements Module.
diff --git a/video_engine/overuse_frame_detector_unittest.cc b/video_engine/overuse_frame_detector_unittest.cc
index a760fbfb..2d7116f8 100644
--- a/video_engine/overuse_frame_detector_unittest.cc
+++ b/video_engine/overuse_frame_detector_unittest.cc
@@ -118,6 +118,24 @@ class OveruseFrameDetectorTest : public ::testing::Test {
overuse_detector_->Process();
}
+ int CaptureJitterMs() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.capture_jitter_ms;
+ }
+
+ int AvgEncodeTimeMs() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.avg_encode_time_ms;
+ }
+
+ int EncodeUsagePercent() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.encode_usage_percent;
+ }
+
CpuOveruseOptions options_;
scoped_ptr<SimulatedClock> clock_;
scoped_ptr<MockCpuOveruseObserver> observer_;
@@ -196,49 +214,58 @@ TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
TriggerOveruse(1);
}
+TEST_F(OveruseFrameDetectorTest, GetCpuOveruseMetrics) {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ EXPECT_GT(metrics.capture_jitter_ms, 0);
+ EXPECT_GT(metrics.avg_encode_time_ms, 0);
+ EXPECT_GT(metrics.encode_usage_percent, 0);
+ EXPECT_GE(metrics.capture_queue_delay_ms_per_s, 0);
+}
+
TEST_F(OveruseFrameDetectorTest, CaptureJitter) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterResolutionChange) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
// Verify reset.
InsertFramesWithInterval(1, kFrameInterval33ms, kWidth, kHeight + 1);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterFrameTimeout) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(
1, options_.frame_timeout_interval_ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
// Verify reset.
InsertFramesWithInterval(
1, options_.frame_timeout_interval_ms + 1, kWidth, kHeight);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterChangingThreshold) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
options_.high_capture_jitter_threshold_ms = 90.0f;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
options_.low_capture_jitter_threshold_ms = 30.0f;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdatingCaptureJitter) {
options_.min_frame_samples = 40;
overuse_detector_->SetOptions(options_);
InsertFramesWithInterval(40, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) {
@@ -289,33 +316,33 @@ TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
const int kInitialAvgEncodeTimeInMs = 5;
- EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->AvgEncodeTimeMs());
+ EXPECT_EQ(kInitialAvgEncodeTimeInMs, AvgEncodeTimeMs());
for (int i = 0; i < 30; i++) {
clock_->AdvanceTimeMilliseconds(33);
overuse_detector_->FrameEncoded(2);
}
- EXPECT_EQ(2, overuse_detector_->AvgEncodeTimeMs());
+ EXPECT_EQ(2, AvgEncodeTimeMs());
}
TEST_F(OveruseFrameDetectorTest, InitialEncodeUsage) {
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, EncodedUsage) {
const int kEncodeTimeMs = 5;
InsertAndEncodeFramesWithInterval(
1000, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs);
- EXPECT_EQ(15, overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(15, EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, EncodeUsageResetAfterChangingThreshold) {
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
options_.high_encode_usage_threshold_percent = 100;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
options_.low_encode_usage_threshold_percent = 20;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithEncodeUsage) {
diff --git a/video_engine/video_engine_core.gypi b/video_engine/video_engine_core.gypi
index 57cdecd8..dfb48b4e 100644
--- a/video_engine/video_engine_core.gypi
+++ b/video_engine/video_engine_core.gypi
@@ -131,6 +131,7 @@
'encoder_state_feedback_unittest.cc',
'overuse_frame_detector_unittest.cc',
'stream_synchronization_unittest.cc',
+ 'vie_capturer_unittest.cc',
'vie_codec_unittest.cc',
'vie_remb_unittest.cc',
],
diff --git a/video_engine/vie_base_impl.cc b/video_engine/vie_base_impl.cc
index f4b87e8f..29fbe7fb 100644
--- a/video_engine/vie_base_impl.cc
+++ b/video_engine/vie_base_impl.cc
@@ -140,10 +140,35 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
if (provider) {
ViECapturer* capturer = is.Capture(provider->Id());
if (capturer) {
- capturer->CpuOveruseMeasures(capture_jitter_ms,
- avg_encode_time_ms,
- encode_usage_percent,
- capture_queue_delay_ms_per_s);
+ CpuOveruseMetrics metrics;
+ capturer->GetCpuOveruseMetrics(&metrics);
+ *capture_jitter_ms = metrics.capture_jitter_ms;
+ *avg_encode_time_ms = metrics.avg_encode_time_ms;
+ *encode_usage_percent = metrics.encode_usage_percent;
+ *capture_queue_delay_ms_per_s = metrics.capture_queue_delay_ms_per_s;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int ViEBaseImpl::GetCpuOveruseMetrics(int video_channel,
+ CpuOveruseMetrics* metrics) {
+ ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_.SetLastError(kViEBaseInvalidChannelId);
+ return -1;
+ }
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ assert(vie_encoder);
+
+ ViEInputManagerScoped is(*(shared_data_.input_manager()));
+ ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+ if (provider) {
+ ViECapturer* capturer = is.Capture(provider->Id());
+ if (capturer) {
+ capturer->GetCpuOveruseMetrics(metrics);
return 0;
}
}
diff --git a/video_engine/vie_base_impl.h b/video_engine/vie_base_impl.h
index 52c888e3..d6a046e6 100644
--- a/video_engine/vie_base_impl.h
+++ b/video_engine/vie_base_impl.h
@@ -35,6 +35,8 @@ class ViEBaseImpl
CpuOveruseObserver* observer);
virtual int SetCpuOveruseOptions(int channel,
const CpuOveruseOptions& options);
+ virtual int GetCpuOveruseMetrics(int channel,
+ CpuOveruseMetrics* metrics);
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
diff --git a/video_engine/vie_capturer.cc b/video_engine/vie_capturer.cc
index f037dc82..30d66335 100644
--- a/video_engine/vie_capturer.cc
+++ b/video_engine/vie_capturer.cc
@@ -10,6 +10,7 @@
#include "webrtc/video_engine/vie_capturer.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/utility/interface/process_thread.h"
@@ -249,15 +250,8 @@ void ViECapturer::SetCpuOveruseOptions(const CpuOveruseOptions& options) {
overuse_detector_->SetOptions(options);
}
-void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) const {
- *capture_jitter_ms = overuse_detector_->CaptureJitterMs();
- *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
- *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
- *capture_queue_delay_ms_per_s =
- overuse_detector_->AvgCaptureQueueDelayMsPerS();
+void ViECapturer::GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const {
+ overuse_detector_->GetCpuOveruseMetrics(metrics);
}
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
@@ -353,11 +347,16 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
- captured_frame_.SwapFrame(&video_frame);
+ if (video_frame.native_handle() != NULL) {
+ captured_frame_.reset(video_frame.CloneFrame());
+ } else {
+ if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
+ captured_frame_.reset(new I420VideoFrame());
+ captured_frame_->SwapFrame(&video_frame);
+ }
capture_event_.Set();
- overuse_detector_->FrameCaptured(captured_frame_.width(),
- captured_frame_.height());
- return;
+ overuse_detector_->FrameCaptured(captured_frame_->width(),
+ captured_frame_->height());
}
void ViECapturer::OnCaptureDelayChanged(const int32_t id,
@@ -480,7 +479,9 @@ bool ViECapturer::ViECaptureProcess() {
deliver_cs_->Enter();
if (SwapCapturedAndDeliverFrameIfAvailable()) {
encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
- DeliverI420Frame(&deliver_frame_);
+ DeliverI420Frame(deliver_frame_.get());
+ if (deliver_frame_->native_handle() != NULL)
+ deliver_frame_.reset(); // Release the texture so it can be reused.
}
deliver_cs_->Leave();
if (current_brightness_level_ != reported_brightness_level_) {
@@ -501,6 +502,11 @@ bool ViECapturer::ViECaptureProcess() {
}
void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
+ if (video_frame->native_handle() != NULL) {
+ ViEFrameProviderBase::DeliverFrame(video_frame);
+ return;
+ }
+
// Apply image enhancement and effect filter.
if (deflicker_frame_stats_) {
if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
@@ -615,11 +621,21 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
CriticalSectionScoped cs(capture_cs_.get());
- if (captured_frame_.IsZeroSize())
+ if (captured_frame_ == NULL)
+ return false;
+
+ if (captured_frame_->native_handle() != NULL) {
+ deliver_frame_.reset(captured_frame_.release());
+ return true;
+ }
+
+ if (captured_frame_->IsZeroSize())
return false;
- deliver_frame_.SwapFrame(&captured_frame_);
- captured_frame_.ResetSize();
+ if (deliver_frame_ == NULL)
+ deliver_frame_.reset(new I420VideoFrame());
+ deliver_frame_->SwapFrame(captured_frame_.get());
+ captured_frame_->ResetSize();
return true;
}
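
The capture path now distinguishes two frame kinds with different ownership rules: texture frames (native_handle() != NULL) cannot be swapped plane by plane, so they are cloned on arrival (the clone shares the refcounted native handle) and handed over by releasing the scoped_ptr, while regular I420 frames keep the cheap SwapFrame() path that reuses previously allocated buffers. A condensed restatement of the rule from the hunks above, with explanatory comments added:

// Texture frame: shallow clone; the underlying handle is refcounted.
// Buffer frame: swap into a reusable I420VideoFrame, allocating a fresh one
// only when no frame exists yet or the previous slot held a texture frame.
if (video_frame.native_handle() != NULL) {
  captured_frame_.reset(video_frame.CloneFrame());
} else {
  if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
    captured_frame_.reset(new I420VideoFrame());
  captured_frame_->SwapFrame(&video_frame);
}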
diff --git a/video_engine/vie_capturer.h b/video_engine/vie_capturer.h
index 37f203a8..8e893577 100644
--- a/video_engine/vie_capturer.h
+++ b/video_engine/vie_capturer.h
@@ -108,11 +108,7 @@ class ViECapturer
void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
void SetCpuOveruseOptions(const CpuOveruseOptions& options);
-
- void CpuOveruseMeasures(int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) const;
+ void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
protected:
ViECapturer(int capture_id,
@@ -173,8 +169,8 @@ class ViECapturer
EventWrapper& capture_event_;
EventWrapper& deliver_event_;
- I420VideoFrame captured_frame_;
- I420VideoFrame deliver_frame_;
+ scoped_ptr<I420VideoFrame> captured_frame_;
+ scoped_ptr<I420VideoFrame> deliver_frame_;
// Image processing.
ViEEffectFilter* effect_filter_;
diff --git a/video_engine/vie_capturer_unittest.cc b/video_engine/vie_capturer_unittest.cc
new file mode 100644
index 00000000..edaf13b5
--- /dev/null
+++ b/video_engine/vie_capturer_unittest.cc
@@ -0,0 +1,263 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for ViECapturer.
+
+#include "webrtc/video_engine/vie_capturer.h"
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/ref_count.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::WithArg;
+
+// If an output frame does not arrive in 500ms, the test will fail.
+#define FRAME_TIMEOUT_MS 500
+
+namespace webrtc {
+
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+ const ScopedVector<I420VideoFrame>& frames2);
+I420VideoFrame* CreateI420VideoFrame(uint8_t length);
+
+class FakeNativeHandle : public NativeHandle {
+ public:
+ FakeNativeHandle() {}
+ virtual ~FakeNativeHandle() {}
+ virtual void* GetHandle() { return NULL; }
+};
+
+class ViECapturerTest : public ::testing::Test {
+ protected:
+ ViECapturerTest()
+ : mock_capture_module_(new NiceMock<MockVideoCaptureModule>()),
+ mock_process_thread_(new NiceMock<MockProcessThread>),
+ mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
+ data_callback_(NULL),
+ output_frame_event_(EventWrapper::Create()) {
+ }
+
+ virtual void SetUp() {
+ EXPECT_CALL(*mock_capture_module_, RegisterCaptureDataCallback(_))
+ .WillRepeatedly(Invoke(this, &ViECapturerTest::SetCaptureDataCallback));
+ EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_, _, _, _))
+ .WillRepeatedly(
+ WithArg<1>(Invoke(this, &ViECapturerTest::AddOutputFrame)));
+
+ Config config;
+ vie_capturer_.reset(
+ ViECapturer::CreateViECapture(
+ 0, 0, config, mock_capture_module_.get(), *mock_process_thread_));
+ vie_capturer_->RegisterFrameCallback(0, mock_frame_callback_.get());
+ }
+
+ virtual void TearDown() {
+ // ViECapturer accesses |mock_process_thread_| in its destructor, so it
+ // must be deleted first.
+ vie_capturer_.reset();
+ }
+
+ void SetCaptureDataCallback(VideoCaptureDataCallback& data_callback) {
+ data_callback_ = &data_callback;
+ }
+
+ void AddInputFrame(I420VideoFrame* frame) {
+ data_callback_->OnIncomingCapturedFrame(0, *frame);
+ }
+
+ void AddOutputFrame(I420VideoFrame* frame) {
+ if (frame->native_handle() == NULL)
+ output_frame_ybuffers_.push_back(frame->buffer(kYPlane));
+ // Clone the frame because ViECapturer retains ownership of it.
+ output_frames_.push_back(frame->CloneFrame());
+ output_frame_event_->Set();
+ }
+
+ void WaitOutputFrame() {
+ EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ }
+
+ scoped_ptr<MockVideoCaptureModule> mock_capture_module_;
+ scoped_ptr<MockProcessThread> mock_process_thread_;
+ scoped_ptr<MockViEFrameCallback> mock_frame_callback_;
+
+ // Used to send input capture frames to ViECapturer.
+ VideoCaptureDataCallback* data_callback_;
+
+ scoped_ptr<ViECapturer> vie_capturer_;
+
+ // Input capture frames of ViECapturer.
+ ScopedVector<I420VideoFrame> input_frames_;
+
+ // Indicates that an output frame has arrived.
+ scoped_ptr<EventWrapper> output_frame_event_;
+
+ // Output frames delivered by ViECapturer.
+ ScopedVector<I420VideoFrame> output_frames_;
+
+ // Pointers to the Y plane buffers of the output frames. Used to verify
+ // that the frames are swapped and not copied.
+ std::vector<uint8_t*> output_frame_ybuffers_;
+};
+
+TEST_F(ViECapturerTest, TestTextureFrames) {
+ const int kNumFrame = 3;
+ for (int i = 0; i < kNumFrame; ++i) {
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i));
+ AddInputFrame(input_frames_[i]);
+ WaitOutputFrame();
+ }
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestI420Frames) {
+ const int kNumFrame = 4;
+ ScopedVector<I420VideoFrame> copied_input_frames;
+ std::vector<uint8_t*> ybuffer_pointers;
+ for (int i = 0; i < kNumFrame; ++i) {
+ input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
+ ybuffer_pointers.push_back(input_frames_[i]->buffer(kYPlane));
+ // Copy input frames because the buffer data will be swapped.
+ copied_input_frames.push_back(input_frames_[i]->CloneFrame());
+ AddInputFrame(input_frames_[i]);
+ WaitOutputFrame();
+ }
+
+ EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
+ // Make sure the buffer is swapped and not copied.
+ for (int i = 0; i < kNumFrame; ++i)
+ EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
+ // The pipeline should be filled with frames that own allocated buffers.
+ // Check that the last input frame has the same allocated size after
+ // swapping.
+ EXPECT_EQ(input_frames_.back()->allocated_size(kYPlane),
+ copied_input_frames.back()->allocated_size(kYPlane));
+}
+
+TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+ AddInputFrame(input_frames_[0]);
+ WaitOutputFrame();
+
+ input_frames_.push_back(CreateI420VideoFrame(1));
+ scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[1]->CloneFrame());
+ AddInputFrame(copied_input_frame.get());
+ WaitOutputFrame();
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
+ input_frames_.push_back(CreateI420VideoFrame(1));
+ scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[0]->CloneFrame());
+ AddInputFrame(copied_input_frame.get());
+ WaitOutputFrame();
+
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+ AddInputFrame(input_frames_[1]);
+ WaitOutputFrame();
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
+ return EqualTextureFrames(frame1, frame2);
+ return EqualBufferFrames(frame1, frame2);
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.native_handle() == frame2.native_handle()) &&
+ (frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
+ (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
+ (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()) &&
+ (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+ (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+ (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
+ (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
+ frame1.allocated_size(kYPlane)) == 0) &&
+ (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
+ frame1.allocated_size(kUPlane)) == 0) &&
+ (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
+ frame1.allocated_size(kVPlane)) == 0));
+}
+
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+ const ScopedVector<I420VideoFrame>& frames2) {
+ if (frames1.size() != frames2.size())
+ return false;
+ for (size_t i = 0; i < frames1.size(); ++i) {
+ if (!EqualFrames(*frames1[i], *frames2[i]))
+ return false;
+ }
+ return true;
+}
+
+I420VideoFrame* CreateI420VideoFrame(uint8_t data) {
+ I420VideoFrame* frame = new I420VideoFrame();
+ const int width = 36;
+ const int height = 24;
+ const int kSizeY = width * height * 2;
+ const int kSizeUV = width * height;
+ uint8_t buffer[kSizeY];
+ memset(buffer, data, kSizeY);
+ frame->CreateFrame(
+ kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
+ width / 2, width / 2);
+ frame->set_timestamp(data);
+ frame->set_ntp_time_ms(data);
+ frame->set_render_time_ms(data);
+ return frame;
+}
+
+} // namespace webrtc
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index afb6d0c6..40a61deb 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -487,6 +487,10 @@ void ViEEncoder::DeliverFrame(int id,
}
encoder_paused_and_dropped_frame_ = false;
}
+ if (video_frame->native_handle() != NULL) {
+ // TODO(wuchengli): add texture support. http://crbug.com/362437
+ return;
+ }
// Convert render time, in ms, to RTP timestamp.
const int kMsToRtpTimestamp = 90;
diff --git a/video_engine/vie_remb_unittest.cc b/video_engine/vie_remb_unittest.cc
index cdfe39c7..1f0b70c5 100644
--- a/video_engine/vie_remb_unittest.cc
+++ b/video_engine/vie_remb_unittest.cc
@@ -18,35 +18,26 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/video_engine/vie_remb.h"
using ::testing::_;
using ::testing::AnyNumber;
+using ::testing::NiceMock;
using ::testing::Return;
namespace webrtc {
-class TestProcessThread : public ProcessThread {
- public:
- explicit TestProcessThread() {}
- ~TestProcessThread() {}
- virtual int32_t Start() { return 0; }
- virtual int32_t Stop() { return 0; }
- virtual int32_t RegisterModule(Module* module) { return 0; }
- virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
-
class ViERembTest : public ::testing::Test {
protected:
virtual void SetUp() {
TickTime::UseFakeClock(12345);
- process_thread_.reset(new TestProcessThread);
+ process_thread_.reset(new NiceMock<MockProcessThread>);
vie_remb_.reset(new VieRemb());
}
- scoped_ptr<TestProcessThread> process_thread_;
+ scoped_ptr<MockProcessThread> process_thread_;
scoped_ptr<VieRemb> vie_remb_;
};