Diffstat (limited to 'webrtc/video')
-rw-r--r--  webrtc/video/BUILD.gn  47
-rw-r--r--  webrtc/video/call_stats.cc  168
-rw-r--r--  webrtc/video/call_stats.h  83
-rw-r--r--  webrtc/video/call_stats_unittest.cc  204
-rw-r--r--  webrtc/video/encoded_frame_callback_adapter.cc  2
-rw-r--r--  webrtc/video/encoded_frame_callback_adapter.h  2
-rw-r--r--  webrtc/video/encoder_state_feedback.cc  124
-rw-r--r--  webrtc/video/encoder_state_feedback.h  71
-rw-r--r--  webrtc/video/encoder_state_feedback_unittest.cc  143
-rw-r--r--  webrtc/video/end_to_end_tests.cc  1000
-rw-r--r--  webrtc/video/full_stack.cc  50
-rw-r--r--  webrtc/video/overuse_frame_detector.cc  364
-rw-r--r--  webrtc/video/overuse_frame_detector.h  164
-rw-r--r--  webrtc/video/overuse_frame_detector_unittest.cc  310
-rw-r--r--  webrtc/video/payload_router.cc  101
-rw-r--r--  webrtc/video/payload_router.h  85
-rw-r--r--  webrtc/video/payload_router_unittest.cc  209
-rw-r--r--  webrtc/video/rampup_tests.cc  509
-rw-r--r--  webrtc/video/rampup_tests.h  135
-rw-r--r--  webrtc/video/receive_statistics_proxy.cc  31
-rw-r--r--  webrtc/video/receive_statistics_proxy.h  7
-rw-r--r--  webrtc/video/replay.cc  2
-rw-r--r--  webrtc/video/report_block_stats.cc  111
-rw-r--r--  webrtc/video/report_block_stats.h  62
-rw-r--r--  webrtc/video/report_block_stats_unittest.cc  146
-rw-r--r--  webrtc/video/screenshare_loopback.cc  153
-rw-r--r--  webrtc/video/send_statistics_proxy.cc  173
-rw-r--r--  webrtc/video/send_statistics_proxy.h  67
-rw-r--r--  webrtc/video/send_statistics_proxy_unittest.cc  33
-rw-r--r--  webrtc/video/stream_synchronization.cc  226
-rw-r--r--  webrtc/video/stream_synchronization.h  59
-rw-r--r--  webrtc/video/stream_synchronization_unittest.cc  563
-rw-r--r--  webrtc/video/video_capture_input.cc  34
-rw-r--r--  webrtc/video/video_capture_input.h  19
-rw-r--r--  webrtc/video/video_capture_input_unittest.cc  36
-rw-r--r--  webrtc/video/video_decoder.cc  15
-rw-r--r--  webrtc/video/video_decoder_unittest.cc  30
-rw-r--r--  webrtc/video/video_encoder.cc  9
-rw-r--r--  webrtc/video/video_encoder_unittest.cc  15
-rw-r--r--  webrtc/video/video_loopback.cc  143
-rw-r--r--  webrtc/video/video_quality_test.cc  572
-rw-r--r--  webrtc/video/video_quality_test.h  49
-rw-r--r--  webrtc/video/video_receive_stream.cc  40
-rw-r--r--  webrtc/video/video_receive_stream.h  8
-rw-r--r--  webrtc/video/video_send_stream.cc  85
-rw-r--r--  webrtc/video/video_send_stream.h  9
-rw-r--r--  webrtc/video/video_send_stream_tests.cc  914
-rw-r--r--  webrtc/video/vie_channel.cc  1218
-rw-r--r--  webrtc/video/vie_channel.h  454
-rw-r--r--  webrtc/video/vie_codec_unittest.cc  230
-rw-r--r--  webrtc/video/vie_encoder.cc  634
-rw-r--r--  webrtc/video/vie_encoder.h  196
-rw-r--r--  webrtc/video/vie_receiver.cc  483
-rw-r--r--  webrtc/video/vie_receiver.h  132
-rw-r--r--  webrtc/video/vie_remb.cc  144
-rw-r--r--  webrtc/video/vie_remb.h  79
-rw-r--r--  webrtc/video/vie_remb_unittest.cc  253
-rw-r--r--  webrtc/video/vie_sync_module.cc  174
-rw-r--r--  webrtc/video/vie_sync_module.h  62
-rw-r--r--  webrtc/video/webrtc_video.gypi  45
60 files changed, 9620 insertions, 1866 deletions
diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn
index 408bb366f0..e35772e22c 100644
--- a/webrtc/video/BUILD.gn
+++ b/webrtc/video/BUILD.gn
@@ -10,35 +10,24 @@ import("../build/webrtc.gni")
source_set("video") {
sources = [
- "../video_engine/call_stats.cc",
- "../video_engine/call_stats.h",
- "../video_engine/encoder_state_feedback.cc",
- "../video_engine/encoder_state_feedback.h",
- "../video_engine/overuse_frame_detector.cc",
- "../video_engine/overuse_frame_detector.h",
- "../video_engine/payload_router.cc",
- "../video_engine/payload_router.h",
- "../video_engine/report_block_stats.cc",
- "../video_engine/report_block_stats.h",
- "../video_engine/stream_synchronization.cc",
- "../video_engine/stream_synchronization.h",
- "../video_engine/vie_channel.cc",
- "../video_engine/vie_channel.h",
- "../video_engine/vie_defines.h",
- "../video_engine/vie_encoder.cc",
- "../video_engine/vie_encoder.h",
- "../video_engine/vie_receiver.cc",
- "../video_engine/vie_receiver.h",
- "../video_engine/vie_remb.cc",
- "../video_engine/vie_remb.h",
- "../video_engine/vie_sync_module.cc",
- "../video_engine/vie_sync_module.h",
+ "call_stats.cc",
+ "call_stats.h",
"encoded_frame_callback_adapter.cc",
"encoded_frame_callback_adapter.h",
+ "encoder_state_feedback.cc",
+ "encoder_state_feedback.h",
+ "overuse_frame_detector.cc",
+ "overuse_frame_detector.h",
+ "payload_router.cc",
+ "payload_router.h",
"receive_statistics_proxy.cc",
"receive_statistics_proxy.h",
+ "report_block_stats.cc",
+ "report_block_stats.h",
"send_statistics_proxy.cc",
"send_statistics_proxy.h",
+ "stream_synchronization.cc",
+ "stream_synchronization.h",
"video_capture_input.cc",
"video_capture_input.h",
"video_decoder.cc",
@@ -47,6 +36,16 @@ source_set("video") {
"video_receive_stream.h",
"video_send_stream.cc",
"video_send_stream.h",
+ "vie_channel.cc",
+ "vie_channel.h",
+ "vie_encoder.cc",
+ "vie_encoder.h",
+ "vie_receiver.cc",
+ "vie_receiver.h",
+ "vie_remb.cc",
+ "vie_remb.h",
+ "vie_sync_module.cc",
+ "vie_sync_module.h",
]
configs += [ "..:common_config" ]
@@ -70,7 +69,7 @@ source_set("video") {
"../modules/video_coding",
"../modules/video_processing",
"../modules/video_render:video_render_module",
- "../voice_engine",
"../system_wrappers",
+ "../voice_engine",
]
}
diff --git a/webrtc/video/call_stats.cc b/webrtc/video/call_stats.cc
new file mode 100644
index 0000000000..69ea1a3d78
--- /dev/null
+++ b/webrtc/video/call_stats.cc
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/call_stats.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc {
+namespace {
+// Time interval for updating the observers.
+const int64_t kUpdateIntervalMs = 1000;
+// Weight factor to apply to the average rtt.
+const float kWeightFactor = 0.3f;
+
+void RemoveOldReports(int64_t now, std::list<CallStats::RttTime>* reports) {
+ // An RTT report is considered valid for this long.
+ const int64_t kRttTimeoutMs = 1500;
+ while (!reports->empty() &&
+ (now - reports->front().time) > kRttTimeoutMs) {
+ reports->pop_front();
+ }
+}
+
+int64_t GetMaxRttMs(std::list<CallStats::RttTime>* reports) {
+ int64_t max_rtt_ms = 0;
+ for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
+ it != reports->end(); ++it) {
+ max_rtt_ms = std::max(it->rtt, max_rtt_ms);
+ }
+ return max_rtt_ms;
+}
+
+int64_t GetAvgRttMs(std::list<CallStats::RttTime>* reports) {
+ if (reports->empty()) {
+ return 0;
+ }
+ int64_t sum = 0;
+ for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
+ it != reports->end(); ++it) {
+ sum += it->rtt;
+ }
+ return sum / reports->size();
+}
+
+void UpdateAvgRttMs(std::list<CallStats::RttTime>* reports, int64_t* avg_rtt) {
+ int64_t cur_rtt_ms = GetAvgRttMs(reports);
+ if (cur_rtt_ms == 0) {
+ // Reset.
+ *avg_rtt = 0;
+ return;
+ }
+ if (*avg_rtt == 0) {
+ // Initialize.
+ *avg_rtt = cur_rtt_ms;
+ return;
+ }
+ *avg_rtt = *avg_rtt * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor;
+}
+} // namespace
+
+class RtcpObserver : public RtcpRttStats {
+ public:
+ explicit RtcpObserver(CallStats* owner) : owner_(owner) {}
+ virtual ~RtcpObserver() {}
+
+ virtual void OnRttUpdate(int64_t rtt) {
+ owner_->OnRttUpdate(rtt);
+ }
+
+ // Returns the average RTT.
+ virtual int64_t LastProcessedRtt() const {
+ return owner_->avg_rtt_ms();
+ }
+
+ private:
+ CallStats* owner_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(RtcpObserver);
+};
+
+CallStats::CallStats(Clock* clock)
+ : clock_(clock),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ rtcp_rtt_stats_(new RtcpObserver(this)),
+ last_process_time_(clock_->TimeInMilliseconds()),
+ max_rtt_ms_(0),
+ avg_rtt_ms_(0) {}
+
+CallStats::~CallStats() {
+ assert(observers_.empty());
+}
+
+int64_t CallStats::TimeUntilNextProcess() {
+ return last_process_time_ + kUpdateIntervalMs - clock_->TimeInMilliseconds();
+}
+
+int32_t CallStats::Process() {
+ CriticalSectionScoped cs(crit_.get());
+ int64_t now = clock_->TimeInMilliseconds();
+ if (now < last_process_time_ + kUpdateIntervalMs)
+ return 0;
+
+ last_process_time_ = now;
+
+ RemoveOldReports(now, &reports_);
+ max_rtt_ms_ = GetMaxRttMs(&reports_);
+ UpdateAvgRttMs(&reports_, &avg_rtt_ms_);
+
+ // If there is a valid rtt, update all observers with the max rtt.
+ // TODO(asapersson): Consider changing this to report the average rtt.
+ if (max_rtt_ms_ > 0) {
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ (*it)->OnRttUpdate(avg_rtt_ms_, max_rtt_ms_);
+ }
+ }
+ return 0;
+}
+
+int64_t CallStats::avg_rtt_ms() const {
+ CriticalSectionScoped cs(crit_.get());
+ return avg_rtt_ms_;
+}
+
+RtcpRttStats* CallStats::rtcp_rtt_stats() const {
+ return rtcp_rtt_stats_.get();
+}
+
+void CallStats::RegisterStatsObserver(CallStatsObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ if (*it == observer)
+ return;
+ }
+ observers_.push_back(observer);
+}
+
+void CallStats::DeregisterStatsObserver(CallStatsObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ if (*it == observer) {
+ observers_.erase(it);
+ return;
+ }
+ }
+}
+
+void CallStats::OnRttUpdate(int64_t rtt) {
+ CriticalSectionScoped cs(crit_.get());
+ reports_.push_back(RttTime(rtt, clock_->TimeInMilliseconds()));
+}
+
+} // namespace webrtc
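Taken together, the helpers above implement a sliding window: an RTT report only contributes to the published max/average for kRttTimeoutMs (1500 ms) after it arrives, and once the window empties the average resets to zero, the "no valid estimate" state in which Process() skips the observer callbacks. A standalone timeline sketch of that behavior (not part of the patch; times in ms):

    // t = 0     OnRttUpdate(100)  -> reports_ = {100 @ t=0}
    // t = 1000  Process()         -> window = {100}: max = 100, avg initialized
    //                                to 100, observers get OnRttUpdate(100, 100)
    // t = 2000  Process()         -> the report is 2000 ms old (> 1500 ms) and is
    //                                dropped; window empty: max = 0, avg reset to 0,
    //                                and no observer callback fires (max_rtt_ms_ == 0)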
diff --git a/webrtc/video/call_stats.h b/webrtc/video/call_stats.h
new file mode 100644
index 0000000000..4ecd911b07
--- /dev/null
+++ b/webrtc/video/call_stats.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_CALL_STATS_H_
+#define WEBRTC_VIDEO_CALL_STATS_H_
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class CriticalSectionWrapper;
+class RtcpRttStats;
+
+// CallStats keeps track of statistics for a call.
+class CallStats : public Module {
+ public:
+ friend class RtcpObserver;
+
+ explicit CallStats(Clock* clock);
+ ~CallStats();
+
+ // Implements Module, to use the process thread.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ // Returns a RtcpRttStats to register at a statistics provider. The object
+ // has the same lifetime as the CallStats instance.
+ RtcpRttStats* rtcp_rtt_stats() const;
+
+ // Registers/deregisters a new observer to receive statistics updates.
+ void RegisterStatsObserver(CallStatsObserver* observer);
+ void DeregisterStatsObserver(CallStatsObserver* observer);
+
+ // Helper struct keeping track of the time an RTT value is reported.
+ struct RttTime {
+ RttTime(int64_t new_rtt, int64_t rtt_time)
+ : rtt(new_rtt), time(rtt_time) {}
+ const int64_t rtt;
+ const int64_t time;
+ };
+
+ protected:
+ void OnRttUpdate(int64_t rtt);
+
+ int64_t avg_rtt_ms() const;
+
+ private:
+ Clock* const clock_;
+ // Protecting all members.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ // Observer receiving statistics updates.
+ rtc::scoped_ptr<RtcpRttStats> rtcp_rtt_stats_;
+ // The last time 'Process' resulted in statistic update.
+ int64_t last_process_time_;
+ // Max/average RTT from the last update (zero if there is no valid estimate).
+ int64_t max_rtt_ms_;
+ int64_t avg_rtt_ms_;
+
+ // All RTT reports within the valid time interval, oldest first.
+ std::list<RttTime> reports_;
+
+ // Observers getting stats reports.
+ std::list<CallStatsObserver*> observers_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(CallStats);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_CALL_STATS_H_
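For orientation, a minimal wiring sketch (not part of the patch; process_thread and MyObserver are assumed, the former a ProcessThread, the latter a CallStatsObserver implementation). CallStats runs as a Module on the process thread, RTT samples reach it through the RtcpRttStats interface handed to the RTCP machinery, and registered observers receive the smoothed average and the max on each update:

    CallStats call_stats(Clock::GetRealTimeClock());
    process_thread->RegisterModule(&call_stats);    // drives Process() periodically
    MyObserver observer;                            // implements CallStatsObserver
    call_stats.RegisterStatsObserver(&observer);
    // Hand the RTT sink to whatever measures RTT (normally the RTP/RTCP module):
    RtcpRttStats* rtt_sink = call_stats.rtcp_rtt_stats();
    rtt_sink->OnRttUpdate(120);                     // report a new sample
    // observer.OnRttUpdate(avg, max) fires on the next Process() tick.
    call_stats.DeregisterStatsObserver(&observer);  // required before destruction
    process_thread->DeRegisterModule(&call_stats);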
diff --git a/webrtc/video/call_stats_unittest.cc b/webrtc/video/call_stats_unittest.cc
new file mode 100644
index 0000000000..6226a5bf6e
--- /dev/null
+++ b/webrtc/video/call_stats_unittest.cc
@@ -0,0 +1,204 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/call_stats.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::Return;
+
+namespace webrtc {
+
+class MockStatsObserver : public CallStatsObserver {
+ public:
+ MockStatsObserver() {}
+ virtual ~MockStatsObserver() {}
+
+ MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+};
+
+class CallStatsTest : public ::testing::Test {
+ public:
+ CallStatsTest() : fake_clock_(12345) {}
+
+ protected:
+ virtual void SetUp() { call_stats_.reset(new CallStats(&fake_clock_)); }
+ SimulatedClock fake_clock_;
+ rtc::scoped_ptr<CallStats> call_stats_;
+};
+
+TEST_F(CallStatsTest, AddAndTriggerCallback) {
+ MockStatsObserver stats_observer;
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
+
+ const int64_t kRtt = 25;
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ const int64_t kRttTimeOutMs = 1500 + 10;
+ fake_clock_.AdvanceTimeMilliseconds(kRttTimeOutMs);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+ EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+TEST_F(CallStatsTest, ProcessTime) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ rtcp_rtt_stats->OnRttUpdate(100);
+
+ // Time isn't updated yet.
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+
+ // Advance clock and verify we get an update.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
+ call_stats_->Process();
+
+ // Advance clock just too little to get an update.
+ fake_clock_.AdvanceTimeMilliseconds(999);
+ rtcp_rtt_stats->OnRttUpdate(100);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+
+ // Advance enough to trigger a new update.
+ fake_clock_.AdvanceTimeMilliseconds(1);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
+ call_stats_->Process();
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+// Verify all observers get correct estimates and observers can be added and
+// removed.
+TEST_F(CallStatsTest, MultipleObservers) {
+ MockStatsObserver stats_observer_1;
+ call_stats_->RegisterStatsObserver(&stats_observer_1);
+ // Add the second observer twice; there should still be only one report to
+ // the observer.
+ MockStatsObserver stats_observer_2;
+ call_stats_->RegisterStatsObserver(&stats_observer_2);
+ call_stats_->RegisterStatsObserver(&stats_observer_2);
+
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ const int64_t kRtt = 100;
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+
+ // Verify both observers are updated.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(1);
+ call_stats_->Process();
+
+ // Deregister the second observer and verify update is only sent to the first
+ // observer.
+ call_stats_->DeregisterStatsObserver(&stats_observer_2);
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
+ call_stats_->Process();
+
+ // Deregister the first observer.
+ call_stats_->DeregisterStatsObserver(&stats_observer_1);
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(0);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
+ call_stats_->Process();
+}
+
+// Verify increasing and decreasing rtt triggers callbacks with correct values.
+TEST_F(CallStatsTest, ChangeRtt) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+
+ // Advance clock to be ready for an update.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+
+ // Set a first value and verify the callback is triggered.
+ const int64_t kFirstRtt = 100;
+ rtcp_rtt_stats->OnRttUpdate(kFirstRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kFirstRtt, kFirstRtt)).Times(1);
+ call_stats_->Process();
+
+ // Increase rtt and verify the new value is reported.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ const int64_t kHighRtt = kFirstRtt + 20;
+ const int64_t kAvgRtt1 = 103;
+ rtcp_rtt_stats->OnRttUpdate(kHighRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt1, kHighRtt)).Times(1);
+ call_stats_->Process();
+
+ // Increase time enough for a new update, but not too much to make the
+ // rtt invalid. Report a lower rtt and verify the old/high value still is sent
+ // in the callback.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ const int64_t kLowRtt = kFirstRtt - 20;
+ const int64_t kAvgRtt2 = 102;
+ rtcp_rtt_stats->OnRttUpdate(kLowRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt2, kHighRtt)).Times(1);
+ call_stats_->Process();
+
+ // Advance time to make the high report invalid, the lower rtt should now be
+ // in the callback.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ const int64_t kAvgRtt3 = 95;
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt3, kLowRtt)).Times(1);
+ call_stats_->Process();
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+TEST_F(CallStatsTest, LastProcessedRtt) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+
+ // Set first values and verify that LastProcessedRtt initially returns the
+ // average rtt.
+ const int64_t kRttLow = 10;
+ const int64_t kRttHigh = 30;
+ const int64_t kAvgRtt = 20;
+ rtcp_rtt_stats->OnRttUpdate(kRttLow);
+ rtcp_rtt_stats->OnRttUpdate(kRttHigh);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ // Update values and verify LastProcessedRtt.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ rtcp_rtt_stats->OnRttUpdate(kRttLow);
+ rtcp_rtt_stats->OnRttUpdate(kRttHigh);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+} // namespace webrtc
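The expected averages in ChangeRtt (kAvgRtt1 = 103, kAvgRtt2 = 102, kAvgRtt3 = 95) follow directly from the smoothing rule in UpdateAvgRttMs(), avg = 0.7 * avg + 0.3 * mean(window), combined with the 1500 ms report window. A self-contained check of that arithmetic (a sketch, not part of the patch):

    #include <cstdint>
    #include <cstdio>

    // Mirrors UpdateAvgRttMs(); cur is the mean of the reports still in the window.
    int64_t Smooth(int64_t avg, int64_t cur) {
      const float kWeightFactor = 0.3f;
      if (cur == 0) return 0;    // window empty -> reset
      if (avg == 0) return cur;  // first estimate -> initialize
      return avg * (1.0f - kWeightFactor) + cur * kWeightFactor;  // truncated
    }

    int main() {
      int64_t avg = 0;
      avg = Smooth(avg, 100);              // window {100}:      avg = 100
      avg = Smooth(avg, (100 + 120) / 2);  // window {100, 120}: 0.7*100 + 0.3*110 = 103
      avg = Smooth(avg, (120 + 80) / 2);   // window {120, 80}:  0.7*103 + 0.3*100 = 102 (102.1)
      avg = Smooth(avg, 80);               // window {80}:       0.7*102 + 0.3*80  = 95  (95.4)
      std::printf("%lld\n", static_cast<long long>(avg));  // prints 95
      return 0;
    }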
diff --git a/webrtc/video/encoded_frame_callback_adapter.cc b/webrtc/video/encoded_frame_callback_adapter.cc
index 407801fd0c..4c6823fa47 100644
--- a/webrtc/video/encoded_frame_callback_adapter.cc
+++ b/webrtc/video/encoded_frame_callback_adapter.cc
@@ -11,7 +11,7 @@
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
namespace webrtc {
namespace internal {
diff --git a/webrtc/video/encoded_frame_callback_adapter.h b/webrtc/video/encoded_frame_callback_adapter.h
index b39a8e2167..b10c4f1645 100644
--- a/webrtc/video/encoded_frame_callback_adapter.h
+++ b/webrtc/video/encoded_frame_callback_adapter.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_
#define WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/frame_callback.h"
namespace webrtc {
diff --git a/webrtc/video/encoder_state_feedback.cc b/webrtc/video/encoder_state_feedback.cc
new file mode 100644
index 0000000000..c0c4b67dbd
--- /dev/null
+++ b/webrtc/video/encoder_state_feedback.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/encoder_state_feedback.h"
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/video/vie_encoder.h"
+
+namespace webrtc {
+
+// Helper class registered at the RTP module relaying callbacks to
+// EncoderStateFeedback.
+class EncoderStateFeedbackObserver : public RtcpIntraFrameObserver {
+ public:
+ explicit EncoderStateFeedbackObserver(EncoderStateFeedback* owner)
+ : owner_(owner) {}
+ ~EncoderStateFeedbackObserver() {}
+
+ // Implements RtcpIntraFrameObserver.
+ virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ owner_->OnReceivedIntraFrameRequest(ssrc);
+ }
+ virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
+ owner_->OnReceivedSLI(ssrc, picture_id);
+ }
+ virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
+ owner_->OnReceivedRPSI(ssrc, picture_id);
+ }
+
+ virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+ owner_->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+ }
+
+ private:
+ EncoderStateFeedback* owner_;
+};
+
+EncoderStateFeedback::EncoderStateFeedback()
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ observer_(new EncoderStateFeedbackObserver(this)) {}
+
+EncoderStateFeedback::~EncoderStateFeedback() {
+ assert(encoders_.empty());
+}
+
+void EncoderStateFeedback::AddEncoder(const std::vector<uint32_t>& ssrcs,
+ ViEEncoder* encoder) {
+ RTC_DCHECK(!ssrcs.empty());
+ CriticalSectionScoped lock(crit_.get());
+ for (uint32_t ssrc : ssrcs) {
+ RTC_DCHECK(encoders_.find(ssrc) == encoders_.end());
+ encoders_[ssrc] = encoder;
+ }
+}
+
+void EncoderStateFeedback::RemoveEncoder(const ViEEncoder* encoder) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.begin();
+ while (it != encoders_.end()) {
+ if (it->second == encoder) {
+ encoders_.erase(it++);
+ } else {
+ ++it;
+ }
+ }
+}
+
+RtcpIntraFrameObserver* EncoderStateFeedback::GetRtcpIntraFrameObserver() {
+ return observer_.get();
+}
+
+void EncoderStateFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedIntraFrameRequest(ssrc);
+}
+
+void EncoderStateFeedback::OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedSLI(ssrc, picture_id);
+}
+
+void EncoderStateFeedback::OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedRPSI(ssrc, picture_id);
+}
+
+void EncoderStateFeedback::OnLocalSsrcChanged(uint32_t old_ssrc,
+ uint32_t new_ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(old_ssrc);
+ if (it == encoders_.end() || encoders_.find(new_ssrc) != encoders_.end()) {
+ return;
+ }
+
+ ViEEncoder* encoder = it->second;
+ encoders_.erase(it);
+ encoders_[new_ssrc] = encoder;
+ encoder->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/encoder_state_feedback.h b/webrtc/video/encoder_state_feedback.h
new file mode 100644
index 0000000000..620e382d89
--- /dev/null
+++ b/webrtc/video/encoder_state_feedback.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(mflodman) ViEEncoder has a time check to not send key frames too
+// often; move the logic to this class.
+
+#ifndef WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
+#define WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EncoderStateFeedbackObserver;
+class RtcpIntraFrameObserver;
+class ViEEncoder;
+
+class EncoderStateFeedback {
+ public:
+ friend class EncoderStateFeedbackObserver;
+
+ EncoderStateFeedback();
+ ~EncoderStateFeedback();
+
+ // Adds an encoder to receive feedback for a set of SSRCs.
+ void AddEncoder(const std::vector<uint32_t>& ssrc, ViEEncoder* encoder);
+
+ // Removes a registered ViEEncoder.
+ void RemoveEncoder(const ViEEncoder* encoder);
+
+ // Returns an observer to register at the requesting class. The observer has
+ // the same lifetime as the EncoderStateFeedback instance.
+ RtcpIntraFrameObserver* GetRtcpIntraFrameObserver();
+
+ protected:
+ // Called by EncoderStateFeedbackObserver when a new key frame is requested.
+ void OnReceivedIntraFrameRequest(uint32_t ssrc);
+ void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
+ void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
+ void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc);
+
+ private:
+ typedef std::map<uint32_t, ViEEncoder*> SsrcEncoderMap;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Instance registered at the class requesting new key frames.
+ rtc::scoped_ptr<EncoderStateFeedbackObserver> observer_;
+
+ // Maps a unique ssrc to the given encoder.
+ SsrcEncoderMap encoders_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(EncoderStateFeedback);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
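A minimal usage sketch (not part of the patch; vie_encoder and the SSRC values are assumptions): all SSRCs of one send stream map to one ViEEncoder, and the relay returned by GetRtcpIntraFrameObserver() is what gets registered wherever RTCP feedback is parsed:

    EncoderStateFeedback feedback;
    std::vector<uint32_t> ssrcs;
    ssrcs.push_back(0x1234);                    // e.g. simulcast SSRCs of one stream
    ssrcs.push_back(0x5678);
    feedback.AddEncoder(ssrcs, vie_encoder);
    RtcpIntraFrameObserver* relay = feedback.GetRtcpIntraFrameObserver();
    // An incoming PLI/FIR for either SSRC now reaches the encoder:
    relay->OnReceivedIntraFrameRequest(0x1234);
    feedback.RemoveEncoder(vie_encoder);        // must precede destruction (asserted)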
diff --git a/webrtc/video/encoder_state_feedback_unittest.cc b/webrtc/video/encoder_state_feedback_unittest.cc
new file mode 100644
index 0000000000..834447e513
--- /dev/null
+++ b/webrtc/video/encoder_state_feedback_unittest.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for EncoderStateFeedback.
+#include "webrtc/video/encoder_state_feedback.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/vie_encoder.h"
+
+using ::testing::NiceMock;
+
+namespace webrtc {
+
+class MockVieEncoder : public ViEEncoder {
+ public:
+ explicit MockVieEncoder(ProcessThread* process_thread, PacedSender* pacer)
+ : ViEEncoder(1, process_thread, nullptr, nullptr, pacer, nullptr) {}
+ ~MockVieEncoder() {}
+
+ MOCK_METHOD1(OnReceivedIntraFrameRequest,
+ void(uint32_t));
+ MOCK_METHOD2(OnReceivedSLI,
+ void(uint32_t ssrc, uint8_t picture_id));
+ MOCK_METHOD2(OnReceivedRPSI,
+ void(uint32_t ssrc, uint64_t picture_id));
+ MOCK_METHOD2(OnLocalSsrcChanged,
+ void(uint32_t old_ssrc, uint32_t new_ssrc));
+};
+
+class VieKeyRequestTest : public ::testing::Test {
+ protected:
+ VieKeyRequestTest()
+ : pacer_(Clock::GetRealTimeClock(),
+ &router_,
+ BitrateController::kDefaultStartBitrateKbps,
+ PacedSender::kDefaultPaceMultiplier *
+ BitrateController::kDefaultStartBitrateKbps,
+ 0) {}
+ virtual void SetUp() {
+ process_thread_.reset(new NiceMock<MockProcessThread>);
+ encoder_state_feedback_.reset(new EncoderStateFeedback());
+ }
+ rtc::scoped_ptr<MockProcessThread> process_thread_;
+ rtc::scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
+ PacketRouter router_;
+ PacedSender pacer_;
+};
+
+TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) {
+ const int ssrc = 1234;
+ MockVieEncoder encoder(process_thread_.get(), &pacer_);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc), &encoder);
+
+ EXPECT_CALL(encoder, OnReceivedIntraFrameRequest(ssrc))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc);
+
+ const uint8_t sli_picture_id = 3;
+ EXPECT_CALL(encoder, OnReceivedSLI(ssrc, sli_picture_id))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc, sli_picture_id);
+
+ const uint64_t rpsi_picture_id = 9;
+ EXPECT_CALL(encoder, OnReceivedRPSI(ssrc, rpsi_picture_id))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc, rpsi_picture_id);
+
+ encoder_state_feedback_->RemoveEncoder(&encoder);
+}
+
+// Register multiple encoders and make sure the request is relayed to correct
+// ViEEncoder.
+TEST_F(VieKeyRequestTest, MultipleEncoders) {
+ const int ssrc_1 = 1234;
+ const int ssrc_2 = 5678;
+ MockVieEncoder encoder_1(process_thread_.get(), &pacer_);
+ MockVieEncoder encoder_2(process_thread_.get(), &pacer_);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_1),
+ &encoder_1);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_2),
+ &encoder_2);
+
+ EXPECT_CALL(encoder_1, OnReceivedIntraFrameRequest(ssrc_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_2);
+
+ const uint8_t sli_pid_1 = 3;
+ const uint8_t sli_pid_2 = 4;
+ EXPECT_CALL(encoder_1, OnReceivedSLI(ssrc_1, sli_pid_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedSLI(ssrc_2, sli_pid_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc_1, sli_pid_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc_2, sli_pid_2);
+
+ const uint64_t rpsi_pid_1 = 9;
+ const uint64_t rpsi_pid_2 = 10;
+ EXPECT_CALL(encoder_1, OnReceivedRPSI(ssrc_1, rpsi_pid_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedRPSI(ssrc_2, rpsi_pid_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc_1, rpsi_pid_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc_2, rpsi_pid_2);
+
+ encoder_state_feedback_->RemoveEncoder(&encoder_1);
+ EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_2);
+ encoder_state_feedback_->RemoveEncoder(&encoder_2);
+}
+
+} // namespace webrtc
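Much of the end_to_end_tests.cc churn below is one mechanical migration: heap-allocated EventWrapper objects returning an EventTypeWrapper are replaced by stack-allocated rtc::Event objects returning bool. The two patterns side by side (a sketch, not part of the patch):

    // Old pattern, removed by this patch:
    rtc::scoped_ptr<EventWrapper> event(EventWrapper::Create());
    event->Set();
    EXPECT_EQ(kEventSignaled, event->Wait(kDefaultTimeoutMs));

    // New pattern, added by this patch:
    rtc::Event event2(false /* manual_reset */, false /* initially_signaled */);
    event2.Set();
    EXPECT_TRUE(event2.Wait(kDefaultTimeoutMs));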
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index e86f560cfd..48dc3e8bbd 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -24,15 +24,13 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/direct_transport.h"
#include "webrtc/test/encoder_settings.h"
-#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator.h"
@@ -42,21 +40,20 @@
#include "webrtc/test/rtcp_packet_parser.h"
#include "webrtc/test/rtp_rtcp_observer.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video_encoder.h"
namespace webrtc {
-static const unsigned long kSilenceTimeoutMs = 2000;
+static const int kSilenceTimeoutMs = 2000;
class EndToEndTest : public test::CallTest {
public:
EndToEndTest() {}
virtual ~EndToEndTest() {
- EXPECT_EQ(nullptr, send_stream_);
- EXPECT_TRUE(receive_streams_.empty());
+ EXPECT_EQ(nullptr, video_send_stream_);
+ EXPECT_TRUE(video_receive_streams_.empty());
}
protected:
@@ -81,20 +78,20 @@ class EndToEndTest : public test::CallTest {
void TestXrReceiverReferenceTimeReport(bool enable_rrtr);
void TestSendsSetSsrcs(size_t num_ssrcs, bool send_single_ssrc_first);
void TestRtpStatePreservation(bool use_rtx);
- void VerifyHistogramStats(bool use_rtx, bool use_red);
+ void VerifyHistogramStats(bool use_rtx, bool use_red, bool screenshare);
};
TEST_F(EndToEndTest, ReceiverCanBeStartedTwice) {
CreateCalls(Call::Config(), Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
CreateMatchingReceiveConfigs(&transport);
- CreateStreams();
+ CreateVideoStreams();
- receive_streams_[0]->Start();
- receive_streams_[0]->Start();
+ video_receive_streams_[0]->Start();
+ video_receive_streams_[0]->Start();
DestroyStreams();
}
@@ -103,13 +100,13 @@ TEST_F(EndToEndTest, ReceiverCanBeStoppedTwice) {
CreateCalls(Call::Config(), Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
CreateMatchingReceiveConfigs(&transport);
- CreateStreams();
+ CreateVideoStreams();
- receive_streams_[0]->Stop();
- receive_streams_[0]->Stop();
+ video_receive_streams_[0]->Stop();
+ video_receive_streams_[0]->Stop();
DestroyStreams();
}
@@ -124,33 +121,33 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
class Renderer : public VideoRenderer {
public:
- Renderer() : event_(EventWrapper::Create()) {}
+ Renderer() : event_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int /*time_to_render_ms*/) override {
- event_->Set();
+ event_.Set();
}
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
} renderer;
class TestFrameCallback : public I420FrameCallback {
public:
- TestFrameCallback() : event_(EventWrapper::Create()) {}
+ TestFrameCallback() : event_(false, false) {}
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
private:
void FrameCallback(VideoFrame* frame) override {
SleepMs(kDelayRenderCallbackMs);
- event_->Set();
+ event_.Set();
}
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
};
CreateCalls(Call::Config(), Call::Config());
@@ -160,24 +157,25 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
TestFrameCallback pre_render_callback;
- receive_configs_[0].pre_render_callback = &pre_render_callback;
- receive_configs_[0].renderer = &renderer;
+ video_receive_configs_[0].pre_render_callback = &pre_render_callback;
+ video_receive_configs_[0].renderer = &renderer;
- CreateStreams();
+ CreateVideoStreams();
Start();
// Create frames that are smaller than the send width/height; this is done to
// check that the callbacks are done after processing video.
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
+ EXPECT_TRUE(pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
- EXPECT_EQ(kEventSignaled, renderer.Wait())
+ EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
Stop();
@@ -191,17 +189,17 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
TEST_F(EndToEndTest, TransmitsFirstFrame) {
class Renderer : public VideoRenderer {
public:
- Renderer() : event_(EventWrapper::Create()) {}
+ Renderer() : event_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int /*time_to_render_ms*/) override {
- event_->Set();
+ event_.Set();
}
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
} renderer;
CreateCalls(Call::Config(), Call::Config());
@@ -211,19 +209,21 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
- receive_configs_[0].renderer = &renderer;
+ video_receive_configs_[0].renderer = &renderer;
- CreateStreams();
+ CreateVideoStreams();
Start();
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(
- encoder_config_.streams[0].width, encoder_config_.streams[0].height));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
+ video_encoder_config_.streams[0].width,
+ video_encoder_config_.streams[0].height));
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, renderer.Wait())
+ EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
Stop();
@@ -244,13 +244,14 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
frame_counter_(0) {}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for enough frames to be decoded.";
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = encoder_.get();
send_config->encoder_settings.payload_name = "VP9";
send_config->encoder_settings.payload_type = 124;
@@ -271,7 +272,7 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
int time_to_render_ms) override {
const int kRequiredFrames = 500;
if (++frame_counter_ == kRequiredFrames)
- observation_complete_->Set();
+ observation_complete_.Set();
}
bool IsTextureSupported() const override { return false; }
@@ -282,7 +283,7 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
int frame_counter_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, SendsAndReceivesH264) {
@@ -294,18 +295,19 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
frame_counter_(0) {}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for enough frames to be decoded.";
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms =
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->encoder_settings.encoder = &fake_encoder_;
send_config->encoder_settings.payload_name = "H264";
- send_config->encoder_settings.payload_type = kFakeSendPayloadType;
+ send_config->encoder_settings.payload_type = kFakeVideoSendPayloadType;
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].target_bitrate_bps =
encoder_config->streams[0].max_bitrate_bps = 2000000;
@@ -323,7 +325,7 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
int time_to_render_ms) override {
const int kRequiredFrames = 500;
if (++frame_counter_ == kRequiredFrames)
- observation_complete_->Set();
+ observation_complete_.Set();
}
bool IsTextureSupported() const override { return false; }
@@ -334,7 +336,7 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
int frame_counter_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReceiverUsesLocalSsrc) {
@@ -350,19 +352,19 @@ TEST_F(EndToEndTest, ReceiverUsesLocalSsrc) {
ssrc |= static_cast<uint32_t>(packet[5]) << 16;
ssrc |= static_cast<uint32_t>(packet[6]) << 8;
ssrc |= static_cast<uint32_t>(packet[7]) << 0;
- EXPECT_EQ(kReceiverLocalSsrc, ssrc);
- observation_complete_->Set();
+ EXPECT_EQ(kReceiverLocalVideoSsrc, ssrc);
+ observation_complete_.Set();
return SEND_PACKET;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for a receiver RTCP packet to be sent.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
@@ -373,7 +375,6 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
public:
NackObserver()
: EndToEndTest(kLongTimeoutMs),
- rtp_parser_(RtpHeaderParser::Create()),
sent_rtp_packets_(0),
packets_left_to_drop_(0),
nacks_left_(kNumberOfNacksToObserve) {}
@@ -382,7 +383,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&crit_);
RTPHeader header;
- EXPECT_TRUE(rtp_parser_->Parse(packet, length, &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
// Never drop retransmitted packets.
if (dropped_packets_.find(header.sequenceNumber) !=
@@ -390,7 +391,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
retransmitted_packets_.insert(header.sequenceNumber);
if (nacks_left_ <= 0 &&
retransmitted_packets_.size() == dropped_packets_.size()) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
@@ -431,21 +432,21 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out waiting for packets to be NACKed, retransmitted and "
"rendered.";
}
rtc::CriticalSection crit_;
- rtc::scoped_ptr<RtpHeaderParser> rtp_parser_;
std::set<uint16_t> dropped_packets_;
std::set<uint16_t> retransmitted_packets_;
uint64_t sent_rtp_packets_;
@@ -453,7 +454,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
int nacks_left_ GUARDED_BY(&crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, CanReceiveFec) {
@@ -472,10 +473,10 @@ TEST_F(EndToEndTest, CanReceiveFec) {
if (header.payloadType == kRedPayloadType) {
encapsulated_payload_type =
static_cast<int>(packet[header.headerLength]);
- if (encapsulated_payload_type != kFakeSendPayloadType)
+ if (encapsulated_payload_type != kFakeVideoSendPayloadType)
EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
} else {
- EXPECT_EQ(kFakeSendPayloadType, header.payloadType);
+ EXPECT_EQ(kFakeVideoSendPayloadType, header.payloadType);
}
if (protected_sequence_numbers_.count(header.sequenceNumber) != 0) {
@@ -499,7 +500,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
return DROP_PACKET;
break;
case kDropNextMediaPacket:
- if (encapsulated_payload_type == kFakeSendPayloadType) {
+ if (encapsulated_payload_type == kFakeVideoSendPayloadType) {
protected_sequence_numbers_.insert(header.sequenceNumber);
protected_timestamps_.insert(header.timestamp);
state_ = kDropEveryOtherPacketUntilFec;
@@ -517,7 +518,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
// Rendering frame with timestamp of packet that was dropped -> FEC
// protection worked.
if (protected_timestamps_.count(video_frame.timestamp()) != 0)
- observation_complete_->Set();
+ observation_complete_.Set();
}
bool IsTextureSupported() const override { return false; }
@@ -528,9 +529,10 @@ TEST_F(EndToEndTest, CanReceiveFec) {
kDropNextMediaPacket,
} state_;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// TODO(pbos): Run this test with combined NACK/FEC enabled as well.
// int rtp_history_ms = 1000;
// (*receive_configs)[0].rtp.nack.rtp_history_ms = rtp_history_ms;
@@ -544,7 +546,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out waiting for dropped frames frames to be rendered.";
}
@@ -553,14 +555,14 @@ TEST_F(EndToEndTest, CanReceiveFec) {
std::set<uint32_t> protected_timestamps_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
// Flaky on all platforms. See webrtc:4328.
TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
class FecNackObserver : public test::EndToEndTest {
public:
- explicit FecNackObserver()
+ FecNackObserver()
: EndToEndTest(kDefaultTimeoutMs),
state_(kFirstPacket),
fec_sequence_number_(0),
@@ -569,6 +571,7 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock_(&crit_);
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
@@ -576,10 +579,10 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
if (header.payloadType == kRedPayloadType) {
encapsulated_payload_type =
static_cast<int>(packet[header.headerLength]);
- if (encapsulated_payload_type != kFakeSendPayloadType)
+ if (encapsulated_payload_type != kFakeVideoSendPayloadType)
EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
} else {
- EXPECT_EQ(kFakeSendPayloadType, header.payloadType);
+ EXPECT_EQ(kFakeVideoSendPayloadType, header.payloadType);
}
if (has_last_sequence_number_ &&
@@ -619,6 +622,7 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock_(&crit_);
if (state_ == kVerifyFecPacketNotInNackList) {
test::RtcpPacketParser rtcp_parser;
rtcp_parser.Parse(packet, length);
@@ -627,12 +631,22 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
IsNewerSequenceNumber(nacks.back(), fec_sequence_number_)) {
EXPECT_TRUE(std::find(
nacks.begin(), nacks.end(), fec_sequence_number_) == nacks.end());
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
return SEND_PACKET;
}
+ test::PacketTransport* CreateSendTransport(Call* sender_call) override {
+ // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
+ // Configure some network delay.
+ const int kNetworkDelayMs = 50;
+ FakeNetworkPipe::Config config;
+ config.queue_delay_ms = kNetworkDelayMs;
+ return new test::PacketTransport(sender_call, this,
+ test::PacketTransport::kSender, config);
+ }
+
// TODO(holmer): Investigate why we don't send FEC packets when the bitrate
// is 10 kbps.
Call::Config GetSenderCallConfig() override {
@@ -642,9 +656,10 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// Configure hybrid NACK/FEC.
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->rtp.fec.red_payload_type = kRedPayloadType;
@@ -655,7 +670,7 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for FEC packets to be received.";
}
@@ -666,17 +681,13 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
kVerifyFecPacketNotInNackList,
} state_;
- uint16_t fec_sequence_number_;
+ rtc::CriticalSection crit_;
+ uint16_t fec_sequence_number_ GUARDED_BY(&crit_);
bool has_last_sequence_number_;
uint16_t last_sequence_number_;
} test;
- // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
- // Configure some network delay.
- const int kNetworkDelayMs = 50;
- FakeNetworkPipe::Config config;
- config.queue_delay_ms = kNetworkDelayMs;
- RunBaseTest(&test, config);
+ RunBaseTest(&test);
}
// This test drops the second RTP packet with a marker bit set, makes sure it's
@@ -691,7 +702,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
explicit RetransmissionObserver(bool use_rtx, bool use_red)
: EndToEndTest(kDefaultTimeoutMs),
payload_type_(GetPayloadType(false, use_red)),
- retransmission_ssrc_(use_rtx ? kSendRtxSsrcs[0] : kSendSsrcs[0]),
+ retransmission_ssrc_(use_rtx ? kSendRtxSsrcs[0] : kVideoSendSsrcs[0]),
retransmission_payload_type_(GetPayloadType(use_rtx, use_red)),
marker_bits_observed_(0),
num_packets_observed_(0),
@@ -719,7 +730,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
return SEND_PACKET;
}
- EXPECT_EQ(kSendSsrcs[0], header.ssrc);
+ EXPECT_EQ(kVideoSendSsrcs[0], header.ssrc);
EXPECT_EQ(payload_type_, header.payloadType);
// Found the final packet of the frame to inflict loss to, drop this and
@@ -736,13 +747,14 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
rtc::CritScope lock(&crit_);
if (frame->timestamp() == retransmitted_timestamp_) {
EXPECT_TRUE(frame_retransmitted_);
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].pre_render_callback = this;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
@@ -757,21 +769,21 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
if (retransmission_ssrc_ == kSendRtxSsrcs[0]) {
send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].ssrc =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].ssrc =
kSendRtxSsrcs[0];
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].payload_type =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].payload_type =
kSendRtxPayloadType;
}
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for retransmission to render.";
}
int GetPayloadType(bool use_rtx, bool use_red) {
return use_rtx ? kSendRtxPayloadType
- : (use_red ? kRedPayloadType : kFakeSendPayloadType);
+ : (use_red ? kRedPayloadType : kFakeVideoSendPayloadType);
}
rtc::CriticalSection crit_;
@@ -784,7 +796,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
bool frame_retransmitted_;
} test(use_rtx, use_red);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, DecodesRetransmittedFrame) {
@@ -809,30 +821,30 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
class Renderer : public VideoRenderer {
public:
- Renderer() : event_(EventWrapper::Create()) {}
+ Renderer() : event_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int /*time_to_render_ms*/) override {
EXPECT_EQ(0, *video_frame.buffer(kYPlane))
<< "Rendered frame should have zero luma which is applied by the "
"pre-render callback.";
- event_->Set();
+ event_.Set();
}
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
- rtc::scoped_ptr<EventWrapper> event_;
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
+ rtc::Event event_;
} renderer;
class TestFrameCallback : public I420FrameCallback {
public:
TestFrameCallback(int expected_luma_byte, int next_luma_byte)
- : event_(EventWrapper::Create()),
+ : event_(false, false),
expected_luma_byte_(expected_luma_byte),
next_luma_byte_(next_luma_byte) {}
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
private:
virtual void FrameCallback(VideoFrame* frame) {
@@ -850,10 +862,10 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
next_luma_byte_,
frame->allocated_size(kYPlane));
- event_->Set();
+ event_.Set();
}
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
int expected_luma_byte_;
int next_luma_byte_;
};
@@ -868,34 +880,35 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
rtc::scoped_ptr<VideoEncoder> encoder(
VideoEncoder::Create(VideoEncoder::kVp8));
- send_config_.encoder_settings.encoder = encoder.get();
- send_config_.encoder_settings.payload_name = "VP8";
- ASSERT_EQ(1u, encoder_config_.streams.size()) << "Test setup error.";
- encoder_config_.streams[0].width = kWidth;
- encoder_config_.streams[0].height = kHeight;
- send_config_.pre_encode_callback = &pre_encode_callback;
+ video_send_config_.encoder_settings.encoder = encoder.get();
+ video_send_config_.encoder_settings.payload_name = "VP8";
+ ASSERT_EQ(1u, video_encoder_config_.streams.size()) << "Test setup error.";
+ video_encoder_config_.streams[0].width = kWidth;
+ video_encoder_config_.streams[0].height = kHeight;
+ video_send_config_.pre_encode_callback = &pre_encode_callback;
CreateMatchingReceiveConfigs(&receiver_transport);
- receive_configs_[0].pre_render_callback = &pre_render_callback;
- receive_configs_[0].renderer = &renderer;
+ video_receive_configs_[0].pre_render_callback = &pre_render_callback;
+ video_receive_configs_[0].renderer = &renderer;
- CreateStreams();
+ CreateVideoStreams();
Start();
// Create frames that are smaller than the send width/height; this is done to
// check that the callbacks are invoked after the video has been processed.
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait())
+ EXPECT_TRUE(pre_encode_callback.Wait())
<< "Timed out while waiting for pre-encode callback.";
- EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
+ EXPECT_TRUE(pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
- EXPECT_EQ(kEventSignaled, renderer.Wait())
+ EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
Stop();
@@ -962,7 +975,7 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
rtc::CritScope lock(&crit_);
if (received_pli_ &&
video_frame.timestamp() > highest_dropped_timestamp_) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
if (!received_pli_)
frames_to_drop_ = kPacketsToDrop;
@@ -970,18 +983,19 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
bool IsTextureSupported() const override { return false; }
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = rtp_history_ms_;
(*receive_configs)[0].rtp.nack.rtp_history_ms = rtp_history_ms_;
(*receive_configs)[0].renderer = this;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out waiting for PLI to be "
- "received and a frame to be "
- "rendered afterwards.";
+ EXPECT_TRUE(Wait()) << "Timed out waiting for PLI to be "
+ "received and a frame to be "
+ "rendered afterwards.";
}
rtc::CriticalSection crit_;
@@ -992,15 +1006,14 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
bool received_pli_ GUARDED_BY(&crit_);
} test(rtp_history_ms);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReceivesPliAndRecoversWithNack) {
ReceivesPliAndRecovers(1000);
}
-// TODO(pbos): Enable this when 2250 is resolved.
-TEST_F(EndToEndTest, DISABLED_ReceivesPliAndRecoversWithoutNack) {
+TEST_F(EndToEndTest, ReceivesPliAndRecoversWithoutNack) {
ReceivesPliAndRecovers(0);
}
@@ -1008,11 +1021,9 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
class PacketInputObserver : public PacketReceiver {
public:
explicit PacketInputObserver(PacketReceiver* receiver)
- : receiver_(receiver), delivered_packet_(EventWrapper::Create()) {}
+ : receiver_(receiver), delivered_packet_(false, false) {}
- EventTypeWrapper Wait() {
- return delivered_packet_->Wait(kDefaultTimeoutMs);
- }
+ bool Wait() { return delivered_packet_.Wait(kDefaultTimeoutMs); }
private:
DeliveryStatus DeliverPacket(MediaType media_type,
@@ -1026,13 +1037,13 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
DeliveryStatus delivery_status =
receiver_->DeliverPacket(media_type, packet, length, packet_time);
EXPECT_EQ(DELIVERY_UNKNOWN_SSRC, delivery_status);
- delivered_packet_->Set();
+ delivered_packet_.Set();
return delivery_status;
}
}
PacketReceiver* receiver_;
- rtc::scoped_ptr<EventWrapper> delivered_packet_;
+ rtc::Event delivered_packet_;
};
CreateCalls(Call::Config(), Call::Config());
@@ -1043,18 +1054,18 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
send_transport.SetReceiver(&input_observer);
receive_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &send_transport);
+ CreateSendConfig(1, 0, &send_transport);
CreateMatchingReceiveConfigs(&receive_transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
- receiver_call_->DestroyVideoReceiveStream(receive_streams_[0]);
- receive_streams_.clear();
+ receiver_call_->DestroyVideoReceiveStream(video_receive_streams_[0]);
+ video_receive_streams_.clear();
// Wait() waits for a received packet.
- EXPECT_EQ(kEventSignaled, input_observer.Wait());
+ EXPECT_TRUE(input_observer.Wait());
Stop();
@@ -1103,16 +1114,16 @@ void EndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
if (!has_report_block) {
ADD_FAILURE() << "Received RTCP packet without receiver report for "
"RtcpMode::kCompound.";
- observation_complete_->Set();
+ observation_complete_.Set();
}
if (sent_rtcp_ >= kNumCompoundRtcpPacketsToObserve)
- observation_complete_->Set();
+ observation_complete_.Set();
break;
case RtcpMode::kReducedSize:
if (!has_report_block)
- observation_complete_->Set();
+ observation_complete_.Set();
break;
case RtcpMode::kOff:
RTC_NOTREACHED();
@@ -1122,16 +1133,17 @@ void EndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.rtcp_mode = rtcp_mode_;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< (rtcp_mode_ == RtcpMode::kCompound
? "Timed out before observing enough compound packets."
: "Timed out before receiving a non-compound RTCP packet.");
@@ -1142,7 +1154,7 @@ void EndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
int sent_rtcp_;
} test(rtcp_mode);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, UsesRtcpCompoundMode) {
@@ -1219,7 +1231,7 @@ class MultiStreamTest {
VideoReceiveStream::Config receive_config(receiver_transport.get());
receive_config.rtp.remote_ssrc = ssrc;
- receive_config.rtp.local_ssrc = test::CallTest::kReceiverLocalSsrc;
+ receive_config.rtp.local_ssrc = test::CallTest::kReceiverLocalVideoSsrc;
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(send_config.encoder_settings);
allocated_decoders.push_back(decoder.decoder);
@@ -1282,27 +1294,27 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
: settings_(settings),
ssrc_(ssrc),
frame_generator_(frame_generator),
- done_(EventWrapper::Create()) {}
+ done_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int time_to_render_ms) override {
EXPECT_EQ(settings_.width, video_frame.width());
EXPECT_EQ(settings_.height, video_frame.height());
(*frame_generator_)->Stop();
- done_->Set();
+ done_.Set();
}
uint32_t Ssrc() { return ssrc_; }
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return done_.Wait(kDefaultTimeoutMs); }
private:
const MultiStreamTest::CodecSettings& settings_;
const uint32_t ssrc_;
test::FrameGeneratorCapturer** const frame_generator_;
- rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
};
class Tester : public MultiStreamTest {
@@ -1313,8 +1325,8 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
protected:
void Wait() override {
for (const auto& observer : observers_) {
- EXPECT_EQ(EventTypeWrapper::kEventSignaled, observer->Wait())
- << "Time out waiting for from on ssrc " << observer->Ssrc();
+ EXPECT_TRUE(observer->Wait()) << "Timed out waiting for frames on ssrc "
+ << observer->Ssrc();
}
}
@@ -1350,7 +1362,7 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
const uint32_t& first_media_ssrc,
const std::map<uint32_t, uint32_t>& ssrc_map)
: DirectTransport(sender_call),
- done_(EventWrapper::Create()),
+ done_(false, false),
parser_(RtpHeaderParser::Create()),
first_media_ssrc_(first_media_ssrc),
rtx_to_media_ssrcs_(ssrc_map),
@@ -1419,7 +1431,7 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
}
if (IsDone())
- done_->Set();
+ done_.Set();
if (drop_packet)
return true;
@@ -1441,18 +1453,18 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
return seqno_range == received_packed_ids_.size();
}
- EventTypeWrapper Wait() {
+ bool Wait() {
{
// Can't be sure until this point that rtx_to_media_ssrcs_ etc have
// been initialized and are OK to read.
rtc::CritScope cs(&lock_);
started_ = true;
}
- return done_->Wait(kDefaultTimeoutMs);
+ return done_.Wait(kDefaultTimeoutMs);
}
rtc::CriticalSection lock_;
- rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
rtc::scoped_ptr<RtpHeaderParser> parser_;
SequenceNumberUnwrapper unwrapper_;
std::set<int64_t> received_packed_ids_;
@@ -1475,7 +1487,7 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
protected:
void Wait() override {
RTC_DCHECK(observer_ != nullptr);
- EXPECT_EQ(EventTypeWrapper::kEventSignaled, observer_->Wait());
+ EXPECT_TRUE(observer_->Wait());
}
void UpdateSendConfig(
@@ -1530,77 +1542,122 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
tester.RunTest();
}
-TEST_F(EndToEndTest, ReceivesTransportFeedback) {
- static const int kExtensionId = 5;
+class TransportFeedbackTester : public test::EndToEndTest {
+ public:
+ explicit TransportFeedbackTester(bool feedback_enabled,
+ size_t num_video_streams,
+ size_t num_audio_streams)
+ : EndToEndTest(::webrtc::EndToEndTest::kDefaultTimeoutMs),
+ feedback_enabled_(feedback_enabled),
+ num_video_streams_(num_video_streams),
+ num_audio_streams_(num_audio_streams) {
+ // Only one stream of each kind is supported for now.
+ EXPECT_LE(num_video_streams, 1u);
+ EXPECT_LE(num_audio_streams, 1u);
+ }
- class TransportFeedbackObserver : public test::DirectTransport {
- public:
- TransportFeedbackObserver(Call* receiver_call, rtc::Event* done_event)
- : DirectTransport(receiver_call), done_(done_event) {}
- virtual ~TransportFeedbackObserver() {}
+ protected:
+ Action OnSendRtcp(const uint8_t* data, size_t length) override {
+ EXPECT_FALSE(HasTransportFeedback(data, length));
+ return SEND_PACKET;
+ }
- bool SendRtcp(const uint8_t* data, size_t length) override {
- RTCPUtility::RTCPParserV2 parser(data, length, true);
- EXPECT_TRUE(parser.IsValid());
+ Action OnReceiveRtcp(const uint8_t* data, size_t length) override {
+ if (HasTransportFeedback(data, length))
+ observation_complete_.Set();
+ return SEND_PACKET;
+ }
- RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
- while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
- if (packet_type == RTCPUtility::RTCPPacketTypes::kTransportFeedback) {
- done_->Set();
- break;
- }
- packet_type = parser.Iterate();
- }
+ bool HasTransportFeedback(const uint8_t* data, size_t length) const {
+ RTCPUtility::RTCPParserV2 parser(data, length, true);
+ EXPECT_TRUE(parser.IsValid());
- return test::DirectTransport::SendRtcp(data, length);
+ RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
+ while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
+ if (packet_type == RTCPUtility::RTCPPacketTypes::kTransportFeedback)
+ return true;
+ packet_type = parser.Iterate();
}
- rtc::Event* done_;
- };
+ return false;
+ }
- class TransportFeedbackTester : public MultiStreamTest {
- public:
- TransportFeedbackTester() : done_(false, false) {}
- virtual ~TransportFeedbackTester() {}
+ void PerformTest() override {
+ const int64_t kDisabledFeedbackTimeoutMs = 5000;
+ EXPECT_EQ(feedback_enabled_,
+ observation_complete_.Wait(feedback_enabled_
+ ? test::CallTest::kDefaultTimeoutMs
+ : kDisabledFeedbackTimeoutMs));
+ }
- protected:
- void Wait() override {
- EXPECT_TRUE(done_.Wait(CallTest::kDefaultTimeoutMs));
- }
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
+ receiver_call_ = receiver_call;
+ }
- void UpdateSendConfig(
- size_t stream_index,
- VideoSendStream::Config* send_config,
- VideoEncoderConfig* encoder_config,
- test::FrameGeneratorCapturer** frame_generator) override {
- send_config->rtp.extensions.push_back(
- RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
- }
+ size_t GetNumVideoStreams() const override { return num_video_streams_; }
+ size_t GetNumAudioStreams() const override { return num_audio_streams_; }
- void UpdateReceiveConfig(
- size_t stream_index,
- VideoReceiveStream::Config* receive_config) override {
- receive_config->rtp.extensions.push_back(
- RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
- }
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ (*receive_configs)[0].rtp.transport_cc = feedback_enabled_;
+ }
- test::DirectTransport* CreateReceiveTransport(
- Call* receiver_call) override {
- return new TransportFeedbackObserver(receiver_call, &done_);
- }
+ void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions.clear();
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ (*receive_configs)[0].rtp.transport_cc = feedback_enabled_;
+ (*receive_configs)[0].combined_audio_video_bwe = true;
+ }
- private:
- rtc::Event done_;
- } tester;
- tester.RunTest();
+ private:
+ static const int kExtensionId = 5;
+ const bool feedback_enabled_;
+ const size_t num_video_streams_;
+ const size_t num_audio_streams_;
+ Call* receiver_call_;
+};
+
+TEST_F(EndToEndTest, VideoReceivesTransportFeedback) {
+ TransportFeedbackTester test(true, 1, 0);
+ RunBaseTest(&test);
+}
+
+TEST_F(EndToEndTest, VideoTransportFeedbackNotConfigured) {
+ TransportFeedbackTester test(false, 1, 0);
+ RunBaseTest(&test);
+}
+
+TEST_F(EndToEndTest, AudioReceivesTransportFeedback) {
+ TransportFeedbackTester test(true, 0, 1);
+ RunBaseTest(&test);
+}
+
+TEST_F(EndToEndTest, AudioTransportFeedbackNotConfigured) {
+ TransportFeedbackTester test(false, 0, 1);
+ RunBaseTest(&test);
}
+
+TEST_F(EndToEndTest, AudioVideoReceivesTransportFeedback) {
+ TransportFeedbackTester test(true, 1, 1);
+ RunBaseTest(&test);
+}
+
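The rewritten TransportFeedbackTester covers both polarities of the transport_cc flag with a single observer: when feedback is enabled, a transport-feedback RTCP packet must arrive within the default timeout; when disabled, the same wait is expected to expire after the shorter kDisabledFeedbackTimeoutMs. For audio, combined_audio_video_bwe is set, presumably so audio packets feed the same receive-side estimator that generates the feedback. The core check condenses to:

// Wait() signals only if HasTransportFeedback() matched an RTCP packet.
bool saw_feedback = observation_complete_.Wait(
    feedback_enabled_ ? test::CallTest::kDefaultTimeoutMs
                      : kDisabledFeedbackTimeoutMs);
EXPECT_EQ(feedback_enabled_, saw_feedback);  // Must time out when disabled.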
TEST_F(EndToEndTest, ObserversEncodedFrames) {
class EncodedFrameTestObserver : public EncodedFrameObserver {
public:
EncodedFrameTestObserver()
- : length_(0),
- frame_type_(kEmptyFrame),
- called_(EventWrapper::Create()) {}
+ : length_(0), frame_type_(kEmptyFrame), called_(false, false) {}
virtual ~EncodedFrameTestObserver() {}
virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) {
@@ -1608,10 +1665,10 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
length_ = encoded_frame.length_;
buffer_.reset(new uint8_t[length_]);
memcpy(buffer_.get(), encoded_frame.data_, length_);
- called_->Set();
+ called_.Set();
}
- EventTypeWrapper Wait() { return called_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return called_.Wait(kDefaultTimeoutMs); }
void ExpectEqualFrames(const EncodedFrameTestObserver& observer) {
ASSERT_EQ(length_, observer.length_)
@@ -1626,7 +1683,7 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
rtc::scoped_ptr<uint8_t[]> buffer_;
size_t length_;
FrameType frame_type_;
- rtc::scoped_ptr<EventWrapper> called_;
+ rtc::Event called_;
};
EncodedFrameTestObserver post_encode_observer;
@@ -1639,23 +1696,25 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
- send_config_.post_encode_callback = &post_encode_observer;
- receive_configs_[0].pre_decode_callback = &pre_decode_observer;
+ video_send_config_.post_encode_callback = &post_encode_observer;
+ video_receive_configs_[0].pre_decode_callback = &pre_decode_observer;
- CreateStreams();
+ CreateVideoStreams();
Start();
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(
- encoder_config_.streams[0].width, encoder_config_.streams[0].height));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
+ video_encoder_config_.streams[0].width,
+ video_encoder_config_.streams[0].height));
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, post_encode_observer.Wait())
+ EXPECT_TRUE(post_encode_observer.Wait())
<< "Timed out while waiting for send-side encoded-frame callback.";
- EXPECT_EQ(kEventSignaled, pre_decode_observer.Wait())
+ EXPECT_TRUE(pre_decode_observer.Wait())
<< "Timed out while waiting for pre-decode encoded-frame callback.";
post_encode_observer.ExpectEqualFrames(pre_decode_observer);
@@ -1683,29 +1742,29 @@ TEST_F(EndToEndTest, ReceiveStreamSendsRemb) {
while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
if (packet_type == RTCPUtility::RTCPPacketTypes::kPsfbRemb) {
const RTCPUtility::RTCPPacket& packet = parser.Packet();
- EXPECT_EQ(packet.PSFBAPP.SenderSSRC, kReceiverLocalSsrc);
+ EXPECT_EQ(packet.PSFBAPP.SenderSSRC, kReceiverLocalVideoSsrc);
received_psfb = true;
} else if (packet_type == RTCPUtility::RTCPPacketTypes::kPsfbRembItem) {
const RTCPUtility::RTCPPacket& packet = parser.Packet();
EXPECT_GT(packet.REMBItem.BitRate, 0u);
EXPECT_EQ(packet.REMBItem.NumberOfSSRCs, 1u);
- EXPECT_EQ(packet.REMBItem.SSRCs[0], kSendSsrcs[0]);
+ EXPECT_EQ(packet.REMBItem.SSRCs[0], kVideoSendSsrcs[0]);
received_remb = true;
}
packet_type = parser.Iterate();
}
if (received_psfb && received_remb)
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out while waiting for a "
- "receiver RTCP REMB packet to be "
- "sent.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a "
+ "receiver RTCP REMB packet to be "
+ "sent.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, VerifyBandwidthStats) {
@@ -1724,7 +1783,7 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
has_seen_pacer_delay_ = sender_stats.pacer_delay_ms > 0;
if (sender_stats.send_bandwidth_bps > 0 &&
receiver_stats.recv_bandwidth_bps > 0 && has_seen_pacer_delay_) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
@@ -1735,8 +1794,8 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out while waiting for "
- "non-zero bandwidth stats.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ "non-zero bandwidth stats.";
}
private:
@@ -1745,7 +1804,7 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
bool has_seen_pacer_delay_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, VerifyNackStats) {
@@ -1762,6 +1821,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
if (++sent_rtp_packets_ == kPacketNumberToDrop) {
rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
@@ -1774,6 +1834,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
test::RtcpPacketParser rtcp_parser;
rtcp_parser.Parse(packet, length);
std::vector<uint16_t> nacks = rtcp_parser.nack_item()->last_nack_list();
@@ -1784,7 +1845,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
return SEND_PACKET;
}
- void VerifyStats() {
+ void VerifyStats() EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
if (!dropped_rtp_packet_requested_)
return;
int send_stream_nack_packets = 0;
@@ -1804,7 +1865,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
if (send_stream_nack_packets >= 1 && receive_stream_nack_packets >= 1) {
// NACK packet sent on receive stream and received on send stream.
if (MinMetricRunTimePassed())
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
@@ -1818,14 +1879,15 @@ TEST_F(EndToEndTest, VerifyNackStats) {
return elapsed_sec > metrics::kMinRunTimeInSeconds;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -1833,20 +1895,20 @@ TEST_F(EndToEndTest, VerifyNackStats) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out waiting for packet to be NACKed.";
+ EXPECT_TRUE(Wait()) << "Timed out waiting for packet to be NACKed.";
}
+ rtc::CriticalSection crit_;
uint64_t sent_rtp_packets_;
- uint16_t dropped_rtp_packet_;
- bool dropped_rtp_packet_requested_;
+ uint16_t dropped_rtp_packet_ GUARDED_BY(&crit_);
+ bool dropped_rtp_packet_requested_ GUARDED_BY(&crit_);
std::vector<VideoReceiveStream*> receive_streams_;
VideoSendStream* send_stream_;
int64_t start_runtime_ms_;
} test;
test::ClearHistograms();
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.UniqueNackRequestsSentInPercent"));
@@ -1858,13 +1920,16 @@ TEST_F(EndToEndTest, VerifyNackStats) {
"WebRTC.Video.NackPacketsReceivedPerMinute"), 0);
}
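VerifyNackStats also gains thread-safety annotations: the RTP and RTCP observer callbacks may run on different threads, so the counters they share are now lock-guarded. A minimal sketch of the annotation pattern applied above:

rtc::CriticalSection crit_;
uint16_t dropped_rtp_packet_ GUARDED_BY(&crit_);

// Helpers that touch guarded state without taking the lock themselves
// declare the requirement; Clang's analysis then rejects unlocked callers.
void VerifyStats() EXCLUSIVE_LOCKS_REQUIRED(&crit_);

Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
  rtc::CritScope lock(&crit_);  // Held across VerifyStats().
  VerifyStats();
  return SEND_PACKET;
}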
-void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
+void EndToEndTest::VerifyHistogramStats(bool use_rtx,
+ bool use_red,
+ bool screenshare) {
class StatsObserver : public test::EndToEndTest {
public:
- StatsObserver(bool use_rtx, bool use_red)
+ StatsObserver(bool use_rtx, bool use_red, bool screenshare)
: EndToEndTest(kLongTimeoutMs),
use_rtx_(use_rtx),
use_red_(use_red),
+ screenshare_(screenshare),
sender_call_(nullptr),
receiver_call_(nullptr),
start_runtime_ms_(-1) {}
@@ -1872,7 +1937,7 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
if (MinMetricRunTimePassed())
- observation_complete_->Set();
+ observation_complete_.Set();
// GetStats calls GetSendChannelRtcpStatistics
// (via VideoSendStream::GetRtt) which updates ReportBlockStats used by
@@ -1893,9 +1958,10 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
return elapsed_sec > metrics::kMinRunTimeInSeconds * 2;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// NACK
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
@@ -1910,11 +1976,14 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
if (use_rtx_) {
send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].ssrc =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].ssrc =
kSendRtxSsrcs[0];
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].payload_type =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].payload_type =
kSendRtxPayloadType;
}
+ encoder_config->content_type =
+ screenshare_ ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo;
}
void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
@@ -1923,21 +1992,37 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out waiting for packet to be NACKed.";
+ EXPECT_TRUE(Wait()) << "Timed out waiting for packet to be NACKed.";
}
- bool use_rtx_;
- bool use_red_;
+ const bool use_rtx_;
+ const bool use_red_;
+ const bool screenshare_;
Call* sender_call_;
Call* receiver_call_;
int64_t start_runtime_ms_;
- } test(use_rtx, use_red);
+ } test(use_rtx, use_red, screenshare);
test::ClearHistograms();
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
+
+ // Delete the calls so that the Call stats are reported.
+ sender_call_.reset();
+ receiver_call_.reset();
+
+ std::string video_prefix =
+ screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
// Verify that stats have been updated once.
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Call.VideoBitrateReceivedInKbps"));
+ EXPECT_EQ(1,
+ test::NumHistogramSamples("WebRTC.Call.RtcpBitrateReceivedInBps"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Call.BitrateReceivedInKbps"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Call.EstimatedSendBitrateInKbps"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Call.PacerBitrateInKbps"));
+
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.NackPacketsSentPerMinute"));
EXPECT_EQ(1, test::NumHistogramSamples(
@@ -1951,8 +2036,8 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.PliPacketsReceivedPerMinute"));
- EXPECT_EQ(1, test::NumHistogramSamples(
- "WebRTC.Video.KeyFramesSentInPermille"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples(video_prefix + "KeyFramesSentInPermille"));
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.KeyFramesReceivedInPermille"));
@@ -1961,34 +2046,39 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.ReceivedPacketsLostInPercent"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputWidthInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputHeightInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SentWidthInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SentHeightInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "InputWidthInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "InputHeightInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SentWidthInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SentHeightInPixels"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.ReceivedWidthInPixels"));
EXPECT_EQ(1,
test::NumHistogramSamples("WebRTC.Video.ReceivedHeightInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].width),
- test::LastHistogramSample("WebRTC.Video.InputWidthInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].height),
- test::LastHistogramSample("WebRTC.Video.InputHeightInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].width),
- test::LastHistogramSample("WebRTC.Video.SentWidthInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].height),
- test::LastHistogramSample("WebRTC.Video.SentHeightInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].width),
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].width),
+ test::LastHistogramSample(video_prefix + "InputWidthInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].height),
+ test::LastHistogramSample(video_prefix + "InputHeightInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].width),
+ test::LastHistogramSample(video_prefix + "SentWidthInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].height),
+ test::LastHistogramSample(video_prefix + "SentHeightInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].width),
test::LastHistogramSample("WebRTC.Video.ReceivedWidthInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].height),
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].height),
test::LastHistogramSample("WebRTC.Video.ReceivedHeightInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputFramesPerSecond"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SentFramesPerSecond"));
+ EXPECT_EQ(1,
+ test::NumHistogramSamples(video_prefix + "InputFramesPerSecond"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SentFramesPerSecond"));
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.DecodedFramesPerSecond"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.RenderFramesPerSecond"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.EncodeTimeInMs"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.OnewayDelayInMs"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
+
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "EncodeTimeInMs"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.DecodeTimeInMs"));
EXPECT_EQ(1, test::NumHistogramSamples(
@@ -2008,6 +2098,10 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.RetransmittedBitrateReceivedInKbps"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SendSideDelayInMs"));
+ EXPECT_EQ(1,
+ test::NumHistogramSamples(video_prefix + "SendSideDelayMaxInMs"));
+
int num_rtx_samples = use_rtx ? 1 : 0;
EXPECT_EQ(num_rtx_samples, test::NumHistogramSamples(
"WebRTC.Video.RtxBitrateSentInKbps"));
@@ -2026,13 +2120,22 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) {
const bool kEnabledRtx = true;
const bool kEnabledRed = false;
- VerifyHistogramStats(kEnabledRtx, kEnabledRed);
+ const bool kScreenshare = false;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
}
TEST_F(EndToEndTest, VerifyHistogramStatsWithRed) {
const bool kEnabledRtx = false;
const bool kEnabledRed = true;
- VerifyHistogramStats(kEnabledRtx, kEnabledRed);
+ const bool kScreenshare = false;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
+}
+
+TEST_F(EndToEndTest, VerifyHistogramStatsWithScreenshare) {
+ const bool kEnabledRtx = false;
+ const bool kEnabledRed = false;
+ const bool kScreenshare = true;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
}
void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
@@ -2050,6 +2153,7 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
private:
// Receive stream should send RR packets (and RRTR packets if enabled).
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -2070,6 +2174,7 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
}
// Send stream should send SR packets (and DLRR packets if enabled).
virtual Action OnSendRtcp(const uint8_t* packet, size_t length) {
+ rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -2094,32 +2199,34 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
EXPECT_EQ(0, sent_rtcp_rrtr_);
EXPECT_EQ(0, sent_rtcp_dlrr_);
}
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
(*receive_configs)[0].rtp.rtcp_mode = RtcpMode::kReducedSize;
(*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report =
enable_rrtr_;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for RTCP SR/RR packets to be sent.";
}
+ rtc::CriticalSection crit_;
bool enable_rrtr_;
int sent_rtcp_sr_;
- int sent_rtcp_rr_;
- int sent_rtcp_rrtr_;
+ int sent_rtcp_rr_ GUARDED_BY(&crit_);
+ int sent_rtcp_rrtr_ GUARDED_BY(&crit_);
int sent_rtcp_dlrr_;
} test(enable_rrtr);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
@@ -2148,28 +2255,29 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
<< "Received unknown SSRC: " << header.ssrc;
if (!valid_ssrcs_[header.ssrc])
- observation_complete_->Set();
+ observation_complete_.Set();
if (!is_observed_[header.ssrc]) {
is_observed_[header.ssrc] = true;
--ssrcs_to_observe_;
if (expect_single_ssrc_) {
expect_single_ssrc_ = false;
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
if (ssrcs_to_observe_ == 0)
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- size_t GetNumStreams() const override { return num_ssrcs_; }
+ size_t GetNumVideoStreams() const override { return num_ssrcs_; }
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
if (num_ssrcs_ > 1) {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
@@ -2179,27 +2287,27 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
}
}
- encoder_config_all_streams_ = *encoder_config;
+ video_encoder_config_all_streams_ = *encoder_config;
if (send_single_ssrc_first_)
encoder_config->streams.resize(1);
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for "
- << (send_single_ssrc_first_ ? "first SSRC." : "SSRCs.");
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ << (send_single_ssrc_first_ ? "first SSRC."
+ : "SSRCs.");
if (send_single_ssrc_first_) {
// Set full simulcast and continue with the rest of the SSRCs.
- send_stream_->ReconfigureVideoEncoder(encoder_config_all_streams_);
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting on additional SSRCs.";
+ send_stream_->ReconfigureVideoEncoder(
+ video_encoder_config_all_streams_);
+ EXPECT_TRUE(Wait()) << "Timed out while waiting on additional SSRCs.";
}
}
@@ -2214,10 +2322,10 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
bool expect_single_ssrc_;
VideoSendStream* send_stream_;
- VideoEncoderConfig encoder_config_all_streams_;
- } test(kSendSsrcs, num_ssrcs, send_single_ssrc_first);
+ VideoEncoderConfig video_encoder_config_all_streams_;
+ } test(kVideoSendSsrcs, num_ssrcs, send_single_ssrc_first);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReportsSetEncoderRates) {
@@ -2230,15 +2338,16 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
send_stream_(nullptr),
bitrate_kbps_(0) {}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
}
@@ -2248,15 +2357,15 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
return 0;
rtc::CritScope lock(&crit_);
bitrate_kbps_ = new_target_bitrate;
- observation_complete_->Set();
+ observation_complete_.Set();
return 0;
}
void PerformTest() override {
- ASSERT_EQ(kEventSignaled, Wait())
+ ASSERT_TRUE(Wait())
<< "Timed out while waiting for encoder SetRates() call.";
// Wait for GetStats to report a corresponding bitrate.
- for (unsigned int i = 0; i < kDefaultTimeoutMs; ++i) {
+ for (int i = 0; i < kDefaultTimeoutMs; ++i) {
VideoSendStream::Stats stats = send_stream_->GetStats();
{
rtc::CritScope lock(&crit_);
@@ -2277,7 +2386,7 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
uint32_t bitrate_kbps_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, GetStats) {
@@ -2289,26 +2398,26 @@ TEST_F(EndToEndTest, GetStats) {
: EndToEndTest(kLongTimeoutMs),
send_stream_(nullptr),
expected_send_ssrcs_(),
- check_stats_event_(EventWrapper::Create()) {}
+ check_stats_event_(false, false) {}
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
Action OnReceiveRtp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
@@ -2330,6 +2439,9 @@ TEST_F(EndToEndTest, GetStats) {
receive_stats_filled_["IncomingRate"] |=
stats.network_frame_rate != 0 || stats.total_bitrate_bps != 0;
+ send_stats_filled_["DecoderImplementationName"] |=
+ stats.decoder_implementation_name ==
+ test::FakeDecoder::kImplementationName;
receive_stats_filled_["RenderDelayAsHighAsExpected"] |=
stats.render_delay_ms >= kExpectedRenderDelayMs;
@@ -2367,9 +2479,9 @@ TEST_F(EndToEndTest, GetStats) {
stats.rtcp_packet_type_counts.unique_nack_requests != 0;
assert(stats.current_payload_type == -1 ||
- stats.current_payload_type == kFakeSendPayloadType);
+ stats.current_payload_type == kFakeVideoSendPayloadType);
receive_stats_filled_["IncomingPayloadType"] |=
- stats.current_payload_type == kFakeSendPayloadType;
+ stats.current_payload_type == kFakeVideoSendPayloadType;
}
return AllStatsFilled(receive_stats_filled_);
@@ -2385,6 +2497,10 @@ TEST_F(EndToEndTest, GetStats) {
send_stats_filled_["CpuOveruseMetrics"] |=
stats.avg_encode_time_ms != 0 || stats.encode_usage_percent != 0;
+ send_stats_filled_["EncoderImplementationName"] |=
+ stats.encoder_implementation_name ==
+ test::FakeEncoder::kImplementationName;
+
for (std::map<uint32_t, VideoSendStream::StreamStats>::const_iterator it =
stats.substreams.begin();
it != stats.substreams.end(); ++it) {
@@ -2450,15 +2566,23 @@ TEST_F(EndToEndTest, GetStats) {
return true;
}
+ test::PacketTransport* CreateSendTransport(Call* sender_call) override {
+ FakeNetworkPipe::Config network_config;
+ network_config.loss_percent = 5;
+ return new test::PacketTransport(
+ sender_call, this, test::PacketTransport::kSender, network_config);
+ }
+
Call::Config GetSenderCallConfig() override {
Call::Config config = EndToEndTest::GetSenderCallConfig();
config.bitrate_config.start_bitrate_bps = kStartBitrateBps;
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->pre_encode_callback = this; // Used to inject delay.
expected_cname_ = send_config->rtp.c_name = "SomeCName";
@@ -2471,9 +2595,9 @@ TEST_F(EndToEndTest, GetStats) {
}
}
- size_t GetNumStreams() const override { return kNumSsrcs; }
+ size_t GetNumVideoStreams() const override { return kNumSsrcs; }
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -2498,7 +2622,7 @@ TEST_F(EndToEndTest, GetStats) {
int64_t time_until_timeout = stop_time - now;
if (time_until_timeout > 0)
- check_stats_event_->Wait(time_until_timeout);
+ check_stats_event_.Wait(time_until_timeout);
now = clock->TimeInMilliseconds();
}
@@ -2532,12 +2656,10 @@ TEST_F(EndToEndTest, GetStats) {
std::set<uint32_t> expected_send_ssrcs_;
std::string expected_cname_;
- rtc::scoped_ptr<EventWrapper> check_stats_event_;
+ rtc::Event check_stats_event_;
} test;
- FakeNetworkPipe::Config network_config;
- network_config.loss_percent = 5;
- RunBaseTest(&test, network_config);
+ RunBaseTest(&test);
}
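Since RunBaseTest() no longer takes a FakeNetworkPipe::Config, per-test network conditions move into the transport factory, as the CreateSendTransport override above does with its 5% loss. A hypothetical receive-side counterpart would follow the same shape; the method name and argument order here simply mirror the sender variant and are assumptions, not code from the tree:

test::PacketTransport* CreateReceiveTransport(Call* receiver_call) override {
  FakeNetworkPipe::Config network_config;
  network_config.loss_percent = 5;  // Drop 5% of packets toward the sender.
  return new test::PacketTransport(
      receiver_call, this, test::PacketTransport::kReceiver, network_config);
}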
TEST_F(EndToEndTest, ReceiverReferenceTimeReportEnabled) {
@@ -2558,7 +2680,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
sent_rtp_(0) {}
private:
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
receive_stream_ = receive_streams[0];
@@ -2568,7 +2690,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
if (sent_rtp_ >= kNumRtpPacketsToSend) {
VideoReceiveStream::Stats stats = receive_stream_->GetStats();
if (kNumRtpPacketsToSend == stats.rtp_stats.transmitted.packets) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
return DROP_PACKET;
}
@@ -2577,7 +2699,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while verifying number of received RTP packets.";
}
@@ -2585,7 +2707,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
uint32_t sent_rtp_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, SendsSetSsrc) { TestSendsSetSsrcs(1, false); }
@@ -2626,17 +2748,18 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
if (!observed_redundant_retransmission_[header.ssrc]) {
observed_redundant_retransmission_[header.ssrc] = true;
if (--ssrcs_to_observe_ == 0)
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
- size_t GetNumStreams() const override { return kNumSsrcs; }
+ size_t GetNumVideoStreams() const override { return kNumSsrcs; }
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
encoder_config->streams[i].min_bitrate_bps = 10000;
@@ -2655,7 +2778,7 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for redundant payloads on all SSRCs.";
}
@@ -2665,7 +2788,7 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
std::map<uint32_t, bool> registered_rtx_ssrc_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
@@ -2677,7 +2800,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
: test::RtpRtcpObserver(kDefaultTimeoutMs),
ssrcs_to_observe_(kNumSsrcs) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
- configured_ssrcs_[kSendSsrcs[i]] = true;
+ configured_ssrcs_[kVideoSendSsrcs[i]] = true;
if (use_rtx)
configured_ssrcs_[kSendRtxSsrcs[i]] = true;
}
@@ -2744,7 +2867,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
if (!ssrc_observed_[ssrc] && !only_padding) {
ssrc_observed_[ssrc] = true;
if (--ssrcs_to_observe_ == 0)
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
@@ -2770,77 +2893,75 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
send_transport.SetReceiver(receiver_call_->Receiver());
receive_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(kNumSsrcs, &send_transport);
+ CreateSendConfig(kNumSsrcs, 0, &send_transport);
if (use_rtx) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
- send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+ video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
}
- send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
}
// Lower bitrates so that all streams send initially.
- for (size_t i = 0; i < encoder_config_.streams.size(); ++i) {
- encoder_config_.streams[i].min_bitrate_bps = 10000;
- encoder_config_.streams[i].target_bitrate_bps = 15000;
- encoder_config_.streams[i].max_bitrate_bps = 20000;
+ for (size_t i = 0; i < video_encoder_config_.streams.size(); ++i) {
+ video_encoder_config_.streams[i].min_bitrate_bps = 10000;
+ video_encoder_config_.streams[i].target_bitrate_bps = 15000;
+ video_encoder_config_.streams[i].max_bitrate_bps = 20000;
}
// Use the same total bitrates when sending a single stream to avoid lowering
// the bitrate estimate and requiring a subsequent rampup.
- VideoEncoderConfig one_stream = encoder_config_;
+ VideoEncoderConfig one_stream = video_encoder_config_;
one_stream.streams.resize(1);
- for (size_t i = 1; i < encoder_config_.streams.size(); ++i) {
+ for (size_t i = 1; i < video_encoder_config_.streams.size(); ++i) {
one_stream.streams.front().min_bitrate_bps +=
- encoder_config_.streams[i].min_bitrate_bps;
+ video_encoder_config_.streams[i].min_bitrate_bps;
one_stream.streams.front().target_bitrate_bps +=
- encoder_config_.streams[i].target_bitrate_bps;
+ video_encoder_config_.streams[i].target_bitrate_bps;
one_stream.streams.front().max_bitrate_bps +=
- encoder_config_.streams[i].max_bitrate_bps;
+ video_encoder_config_.streams[i].max_bitrate_bps;
}
CreateMatchingReceiveConfigs(&receive_transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
// Test stream resetting more than once to make sure that the state isn't
// only set the first time (as it could be if, e.g., std::map::insert were used).
for (size_t i = 0; i < 3; ++i) {
frame_generator_capturer_->Stop();
- sender_call_->DestroyVideoSendStream(send_stream_);
+ sender_call_->DestroyVideoSendStream(video_send_stream_);
// Re-create VideoSendStream with only one stream.
- send_stream_ =
- sender_call_->CreateVideoSendStream(send_config_, one_stream);
- send_stream_->Start();
+ video_send_stream_ =
+ sender_call_->CreateVideoSendStream(video_send_config_, one_stream);
+ video_send_stream_->Start();
CreateFrameGeneratorCapturer();
frame_generator_capturer_->Start();
observer.ResetExpectedSsrcs(1);
- EXPECT_EQ(kEventSignaled, observer.Wait())
- << "Timed out waiting for single RTP packet.";
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
// Reconfigure back to use all streams.
- send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
observer.ResetExpectedSsrcs(kNumSsrcs);
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
// Reconfigure down to one stream.
- send_stream_->ReconfigureVideoEncoder(one_stream);
+ video_send_stream_->ReconfigureVideoEncoder(one_stream);
observer.ResetExpectedSsrcs(1);
- EXPECT_EQ(kEventSignaled, observer.Wait())
- << "Timed out waiting for single RTP packet.";
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
// Reconfigure back to use all streams.
- send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
observer.ResetExpectedSsrcs(kNumSsrcs);
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
}
@@ -2874,8 +2995,8 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
NetworkStateTest()
: EndToEndTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
- encoded_frames_(EventWrapper::Create()),
- packet_event_(EventWrapper::Create()),
+ encoded_frames_(false, false),
+ packet_event_(false, false),
sender_call_(nullptr),
receiver_call_(nullptr),
sender_state_(kNetworkUp),
@@ -2887,14 +3008,14 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&test_crit_);
++sender_rtp_;
- packet_event_->Set();
+ packet_event_.Set();
return SEND_PACKET;
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&test_crit_);
++sender_rtcp_;
- packet_event_->Set();
+ packet_event_.Set();
return SEND_PACKET;
}
@@ -2906,7 +3027,7 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&test_crit_);
++receiver_rtcp_;
- packet_event_->Set();
+ packet_event_.Set();
return SEND_PACKET;
}
@@ -2915,14 +3036,15 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
receiver_call_ = receiver_call;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, encoded_frames_->Wait(kDefaultTimeoutMs))
+ EXPECT_TRUE(encoded_frames_.Wait(kDefaultTimeoutMs))
<< "No frames received by the encoder.";
// Wait for packets from both sender/receiver.
WaitForPacketsOrSilence(false, false);
@@ -2963,9 +3085,9 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
EXPECT_LE(down_frames_, 1)
<< "Encoding more than one frame while network is down.";
if (down_frames_ > 1)
- encoded_frames_->Set();
+ encoded_frames_.Set();
} else {
- encoded_frames_->Set();
+ encoded_frames_.Set();
}
}
return test::FakeEncoder::Encode(
@@ -2986,8 +3108,8 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
}
bool sender_done = false;
bool receiver_done = false;
- while(!sender_done || !receiver_done) {
- packet_event_->Wait(kSilenceTimeoutMs);
+ while (!sender_done || !receiver_done) {
+ packet_event_.Wait(kSilenceTimeoutMs);
int64_t time_now_ms = clock_->TimeInMilliseconds();
rtc::CritScope lock(&test_crit_);
if (sender_down) {
@@ -3020,8 +3142,8 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
}
rtc::CriticalSection test_crit_;
- const rtc::scoped_ptr<EventWrapper> encoded_frames_;
- const rtc::scoped_ptr<EventWrapper> packet_event_;
+ rtc::Event encoded_frames_;
+ rtc::Event packet_event_;
Call* sender_call_;
Call* receiver_call_;
NetworkState sender_state_ GUARDED_BY(test_crit_);
@@ -3031,7 +3153,7 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
int down_frames_ GUARDED_BY(test_crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, CallReportsRttForSender) {
@@ -3048,10 +3170,10 @@ TEST_F(EndToEndTest, CallReportsRttForSender) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
@@ -3074,7 +3196,7 @@ TEST_F(EndToEndTest, CallReportsRttForSender) {
TEST_F(EndToEndTest, NewSendStreamsRespectNetworkDown) {
class UnusedEncoder : public test::FakeEncoder {
- public:
+ public:
UnusedEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {}
int32_t Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
@@ -3089,10 +3211,10 @@ TEST_F(EndToEndTest, NewSendStreamsRespectNetworkDown) {
sender_call_->SignalNetworkState(kNetworkDown);
UnusedTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
UnusedEncoder unused_encoder;
- send_config_.encoder_settings.encoder = &unused_encoder;
- CreateStreams();
+ video_send_config_.encoder_settings.encoder = &unused_encoder;
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
@@ -3108,10 +3230,10 @@ TEST_F(EndToEndTest, NewReceiveStreamsRespectNetworkDown) {
test::DirectTransport sender_transport(sender_call_.get());
sender_transport.SetReceiver(receiver_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
UnusedTransport transport;
CreateMatchingReceiveConfigs(&transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
@@ -3168,4 +3290,76 @@ TEST_F(EndToEndTest, VerifyDefaultReceiveConfigParameters) {
VerifyEmptyFecConfig(default_receive_config.rtp.fec);
}
+TEST_F(EndToEndTest, TransportSeqNumOnAudioAndVideo) {
+ static const int kExtensionId = 8;
+ class TransportSequenceNumberTest : public test::EndToEndTest {
+ public:
+ TransportSequenceNumberTest()
+ : EndToEndTest(kDefaultTimeoutMs),
+ video_observed_(false),
+ audio_observed_(false) {
+ parser_->RegisterRtpHeaderExtension(kRtpExtensionTransportSequenceNumber,
+ kExtensionId);
+ }
+
+ size_t GetNumVideoStreams() const override { return 1; }
+ size_t GetNumAudioStreams() const override { return 1; }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ }
+
+ void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions.clear();
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
+ EXPECT_TRUE(header.extension.hasTransportSequenceNumber);
+ // Unwrap packet id and verify uniqueness.
+ int64_t packet_id =
+ unwrapper_.Unwrap(header.extension.transportSequenceNumber);
+ EXPECT_TRUE(received_packet_ids_.insert(packet_id).second);
+
+ if (header.ssrc == kVideoSendSsrcs[0])
+ video_observed_ = true;
+ if (header.ssrc == kAudioSendSsrc)
+ audio_observed_ = true;
+ if (audio_observed_ && video_observed_ &&
+ received_packet_ids_.size() == 50) {
+ size_t packet_id_range =
+ *received_packet_ids_.rbegin() - *received_packet_ids_.begin() + 1;
+ EXPECT_EQ(received_packet_ids_.size(), packet_id_range);
+ observation_complete_.Set();
+ }
+ return SEND_PACKET;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for audio and video "
+ "packets with transport sequence number.";
+ }
+
+ private:
+ bool video_observed_;
+ bool audio_observed_;
+ SequenceNumberUnwrapper unwrapper_;
+ std::set<int64_t> received_packet_ids_;
+ } test;
+
+ RunBaseTest(&test);
+}
} // namespace webrtc
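
The test above relies on unwrapping the 16-bit transport-wide sequence number into a 64-bit packet id so uniqueness and contiguity can be checked across wrap-arounds. A minimal sketch of that unwrapping idea (DemoUnwrapper is an illustrative stand-in for the SequenceNumberUnwrapper used above, not WebRTC API):

    #include <cstdint>

    // Turns 16-bit sequence numbers into a monotonic 64-bit id, assuming
    // consecutive packets never jump more than half the 16-bit range.
    class DemoUnwrapper {
     public:
      int64_t Unwrap(uint16_t seq) {
        if (last_unwrapped_ < 0) {
          last_unwrapped_ = seq;
        } else {
          // A signed 16-bit delta moves forward across wrap-around and
          // backward on reordering.
          int16_t diff = static_cast<int16_t>(static_cast<uint16_t>(
              seq - static_cast<uint16_t>(last_unwrapped_)));
          last_unwrapped_ += diff;
        }
        return last_unwrapped_;
      }

     private:
      int64_t last_unwrapped_ = -1;  // No packet seen yet.
    };
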
diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc
index 8511b8281e..e870c1ff14 100644
--- a/webrtc/video/full_stack.cc
+++ b/webrtc/video/full_stack.cc
@@ -23,6 +23,15 @@ class FullStackTest : public VideoQualityTest {
}
};
+// VideoQualityTest::Params params = {
+// { ... }, // Common.
+// { ... }, // Video-specific settings.
+// { ... }, // Screenshare-specific settings.
+// { ... }, // Analyzer settings.
+// pipe, // FakeNetworkPipe::Config
+// { ... }, // Spatial scalability.
+// logs // bool
+// };
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
VideoQualityTest::Params paris_qcif = {
@@ -120,28 +129,55 @@ TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL) {
VideoQualityTest::Params screenshare = {
- {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 400000},
- {}, // Video-specific.
- {true, 10}, // Screenshare-specific.
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
+ {},
+ {true, 10},
{"screenshare_slides", 0.0, 0.0, kFullStackTestDurationSecs}};
RunTest(screenshare);
}
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
VideoQualityTest::Params config = {
- {1850, 1110 / 2, 5, 50000, 200000, 2000000, "VP8", 2, 400000},
+ {1850, 1110 / 2, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
{},
{true, 10, 2},
{"screenshare_slides_scrolling", 0.0, 0.0, kFullStackTestDurationSecs}};
RunTest(config);
}
-TEST_F(FullStackTest, ScreenshareSlidesVP9_2TL) {
+TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNet) {
VideoQualityTest::Params screenshare = {
- {1850, 1110, 5, 50000, 200000, 2000000, "VP9", 2, 400000},
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
+ {}, // Video-specific.
+ {true, 10}, // Screenshare-specific.
+ {"screenshare_slides_lossy_net", 0.0, 0.0, kFullStackTestDurationSecs}};
+ screenshare.pipe.loss_percent = 5;
+ screenshare.pipe.queue_delay_ms = 200;
+ screenshare.pipe.link_capacity_kbps = 500;
+ RunTest(screenshare);
+}
+
+TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) {
+ VideoQualityTest::Params screenshare = {
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
+ {}, // Video-specific.
+ {true, 10}, // Screenshare-specific.
+ {"screenshare_slides_very_lossy", 0.0, 0.0, kFullStackTestDurationSecs}};
+ screenshare.pipe.loss_percent = 10;
+ screenshare.pipe.queue_delay_ms = 200;
+ screenshare.pipe.link_capacity_kbps = 500;
+ RunTest(screenshare);
+}
+
+TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
+ VideoQualityTest::Params screenshare = {
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP9", 1, 0, 400000},
{},
{true, 10},
- {"screenshare_slides_vp9_2tl", 0.0, 0.0, kFullStackTestDurationSecs}};
+ {"screenshare_slides_vp9_2sl", 0.0, 0.0, kFullStackTestDurationSecs},
+ {},
+ false,
+ {std::vector<VideoStream>(), 0, 2, 1}};
RunTest(screenshare);
}
} // namespace webrtc
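
Read against the template comment at the top of the file, the positional brace-initializers above decode as shown below. This is an annotated restatement of ScreenshareSlidesVP8_2TL_LossyNet; the per-field meanings are inferred from the template comment and may not match the actual member names in video_quality_test.h:

    VideoQualityTest::Params screenshare = {
        {1850, 1110, 5,            // Width, height, fps.
         50000, 200000, 2000000,   // Min / target / max bitrate (bps).
         "VP8", 2, 1, 400000},     // Codec, temporal layers, selected TL,
                                   // min transmit bitrate.
        {},                        // Video-specific defaults.
        {true, 10},                // Screenshare mode, slide change interval.
        {"screenshare_slides_lossy_net", 0.0, 0.0,
         kFullStackTestDurationSecs}};          // Analyzer: label, PSNR/SSIM
                                                // thresholds, duration.
    screenshare.pipe.loss_percent = 5;          // FakeNetworkPipe::Config is
    screenshare.pipe.queue_delay_ms = 200;      // set after brace-init, as in
    screenshare.pipe.link_capacity_kbps = 500;  // the tests above.
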
diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc
new file mode 100644
index 0000000000..d971ad9d3e
--- /dev/null
+++ b/webrtc/video/overuse_frame_detector.cc
@@ -0,0 +1,364 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/overuse_frame_detector.h"
+
+#include <assert.h>
+#include <math.h>
+
+#include <algorithm>
+#include <list>
+#include <map>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+namespace {
+const int64_t kProcessIntervalMs = 5000;
+
+// Delay between consecutive rampups. (Used for quick recovery.)
+const int kQuickRampUpDelayMs = 10 * 1000;
+// Delay between rampup attempts. Initially uses standard, scales up to max.
+const int kStandardRampUpDelayMs = 40 * 1000;
+const int kMaxRampUpDelayMs = 240 * 1000;
+// Exponential back-off factor, to prevent annoying up-down behaviour.
+const double kRampUpBackoffFactor = 2.0;
+
+// Max number of overuses detected before always applying the rampup delay.
+const int kMaxOverusesBeforeApplyRampupDelay = 4;
+
+// The maximum exponent to use in rtc::ExpFilter.
+const float kSampleDiffMs = 33.0f;
+const float kMaxExp = 7.0f;
+
+} // namespace
+
+// Class for calculating the processing usage on the send-side (the average
+// processing time of a frame divided by the average time difference between
+// captured frames).
+class OveruseFrameDetector::SendProcessingUsage {
+ public:
+ explicit SendProcessingUsage(const CpuOveruseOptions& options)
+ : kWeightFactorFrameDiff(0.998f),
+ kWeightFactorProcessing(0.995f),
+ kInitialSampleDiffMs(40.0f),
+ kMaxSampleDiffMs(45.0f),
+ count_(0),
+ options_(options),
+ filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
+ filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
+ Reset();
+ }
+ ~SendProcessingUsage() {}
+
+ void Reset() {
+ count_ = 0;
+ filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
+ filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
+ filtered_processing_ms_->Reset(kWeightFactorProcessing);
+ filtered_processing_ms_->Apply(1.0f, InitialProcessingMs());
+ }
+
+ void AddCaptureSample(float sample_ms) {
+ float exp = sample_ms / kSampleDiffMs;
+ exp = std::min(exp, kMaxExp);
+ filtered_frame_diff_ms_->Apply(exp, sample_ms);
+ }
+
+ void AddSample(float processing_ms, int64_t diff_last_sample_ms) {
+ ++count_;
+ float exp = diff_last_sample_ms / kSampleDiffMs;
+ exp = std::min(exp, kMaxExp);
+ filtered_processing_ms_->Apply(exp, processing_ms);
+ }
+
+ int Value() const {
+ if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
+ return static_cast<int>(InitialUsageInPercent() + 0.5f);
+ }
+ float frame_diff_ms = std::max(filtered_frame_diff_ms_->filtered(), 1.0f);
+ frame_diff_ms = std::min(frame_diff_ms, kMaxSampleDiffMs);
+ float encode_usage_percent =
+ 100.0f * filtered_processing_ms_->filtered() / frame_diff_ms;
+ return static_cast<int>(encode_usage_percent + 0.5);
+ }
+
+ private:
+ float InitialUsageInPercent() const {
+ // Start in between the underuse and overuse thresholds.
+ return (options_.low_encode_usage_threshold_percent +
+ options_.high_encode_usage_threshold_percent) / 2.0f;
+ }
+
+ float InitialProcessingMs() const {
+ return InitialUsageInPercent() * kInitialSampleDiffMs / 100;
+ }
+
+ const float kWeightFactorFrameDiff;
+ const float kWeightFactorProcessing;
+ const float kInitialSampleDiffMs;
+ const float kMaxSampleDiffMs;
+ uint64_t count_;
+ const CpuOveruseOptions options_;
+ rtc::scoped_ptr<rtc::ExpFilter> filtered_processing_ms_;
+ rtc::scoped_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
+};
+
+// Class for calculating the processing time of frames.
+class OveruseFrameDetector::FrameQueue {
+ public:
+ FrameQueue() : last_processing_time_ms_(-1) {}
+ ~FrameQueue() {}
+
+ // Called when a frame is captured.
+ // Starts the measuring of the processing time of the frame.
+ void Start(int64_t capture_time, int64_t now) {
+ const size_t kMaxSize = 90; // Allows for processing time of 1.5s at 60fps.
+ if (frame_times_.size() > kMaxSize) {
+ LOG(LS_WARNING) << "Max size reached, removed oldest frame.";
+ frame_times_.erase(frame_times_.begin());
+ }
+ if (frame_times_.find(capture_time) != frame_times_.end()) {
+ // A frame with this capture time should not already exist.
+ assert(false);
+ return;
+ }
+ frame_times_[capture_time] = now;
+ }
+
+ // Called when the processing of a frame has finished.
+ // Returns the processing time of the frame.
+ int End(int64_t capture_time, int64_t now) {
+ std::map<int64_t, int64_t>::iterator it = frame_times_.find(capture_time);
+ if (it == frame_times_.end()) {
+ return -1;
+ }
+ // Remove any old frames up to current.
+ // Old frames have been skipped by the capture process thread.
+ // TODO(asapersson): Consider measuring time from first frame in list.
+ last_processing_time_ms_ = now - (*it).second;
+ frame_times_.erase(frame_times_.begin(), ++it);
+ return last_processing_time_ms_;
+ }
+
+ void Reset() { frame_times_.clear(); }
+ int NumFrames() const { return static_cast<int>(frame_times_.size()); }
+ int last_processing_time_ms() const { return last_processing_time_ms_; }
+
+ private:
+ // Captured frames mapped by the capture time.
+ std::map<int64_t, int64_t> frame_times_;
+ int last_processing_time_ms_;
+};
+
+
+OveruseFrameDetector::OveruseFrameDetector(
+ Clock* clock,
+ const CpuOveruseOptions& options,
+ CpuOveruseObserver* observer,
+ CpuOveruseMetricsObserver* metrics_observer)
+ : options_(options),
+ observer_(observer),
+ metrics_observer_(metrics_observer),
+ clock_(clock),
+ num_process_times_(0),
+ last_capture_time_(0),
+ num_pixels_(0),
+ next_process_time_(clock_->TimeInMilliseconds()),
+ last_overuse_time_(0),
+ checks_above_threshold_(0),
+ num_overuse_detections_(0),
+ last_rampup_time_(0),
+ in_quick_rampup_(false),
+ current_rampup_delay_ms_(kStandardRampUpDelayMs),
+ last_sample_time_ms_(0),
+ usage_(new SendProcessingUsage(options)),
+ frame_queue_(new FrameQueue()) {
+ RTC_DCHECK(metrics_observer != nullptr);
+ // Make sure stats are initially up-to-date. This simplifies unit testing
+ // since we don't have to trigger an update using one of the methods which
+ // would also alter the overuse state.
+ UpdateCpuOveruseMetrics();
+ processing_thread_.DetachFromThread();
+}
+
+OveruseFrameDetector::~OveruseFrameDetector() {
+}
+
+int OveruseFrameDetector::LastProcessingTimeMs() const {
+ rtc::CritScope cs(&crit_);
+ return frame_queue_->last_processing_time_ms();
+}
+
+int OveruseFrameDetector::FramesInQueue() const {
+ rtc::CritScope cs(&crit_);
+ return frame_queue_->NumFrames();
+}
+
+void OveruseFrameDetector::UpdateCpuOveruseMetrics() {
+ metrics_.encode_usage_percent = usage_->Value();
+
+ metrics_observer_->CpuOveruseMetricsUpdated(metrics_);
+}
+
+int64_t OveruseFrameDetector::TimeUntilNextProcess() {
+ RTC_DCHECK(processing_thread_.CalledOnValidThread());
+ return next_process_time_ - clock_->TimeInMilliseconds();
+}
+
+bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
+ return num_pixels != num_pixels_;
+}
+
+bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now) const {
+ if (last_capture_time_ == 0) {
+ return false;
+ }
+ return (now - last_capture_time_) > options_.frame_timeout_interval_ms;
+}
+
+void OveruseFrameDetector::ResetAll(int num_pixels) {
+ num_pixels_ = num_pixels;
+ usage_->Reset();
+ frame_queue_->Reset();
+ last_capture_time_ = 0;
+ num_process_times_ = 0;
+ UpdateCpuOveruseMetrics();
+}
+
+void OveruseFrameDetector::FrameCaptured(int width,
+ int height,
+ int64_t capture_time_ms) {
+ rtc::CritScope cs(&crit_);
+
+ int64_t now = clock_->TimeInMilliseconds();
+ if (FrameSizeChanged(width * height) || FrameTimeoutDetected(now)) {
+ ResetAll(width * height);
+ }
+
+ if (last_capture_time_ != 0)
+ usage_->AddCaptureSample(now - last_capture_time_);
+
+ last_capture_time_ = now;
+
+ frame_queue_->Start(capture_time_ms, now);
+}
+
+void OveruseFrameDetector::FrameSent(int64_t capture_time_ms) {
+ rtc::CritScope cs(&crit_);
+ int delay_ms = frame_queue_->End(capture_time_ms,
+ clock_->TimeInMilliseconds());
+ if (delay_ms > 0) {
+ AddProcessingTime(delay_ms);
+ }
+}
+
+void OveruseFrameDetector::AddProcessingTime(int elapsed_ms) {
+ int64_t now = clock_->TimeInMilliseconds();
+ if (last_sample_time_ms_ != 0) {
+ int64_t diff_ms = now - last_sample_time_ms_;
+ usage_->AddSample(elapsed_ms, diff_ms);
+ }
+ last_sample_time_ms_ = now;
+ UpdateCpuOveruseMetrics();
+}
+
+int32_t OveruseFrameDetector::Process() {
+ RTC_DCHECK(processing_thread_.CalledOnValidThread());
+
+ int64_t now = clock_->TimeInMilliseconds();
+
+ // Used to protect against Process() being called too often.
+ if (now < next_process_time_)
+ return 0;
+
+ next_process_time_ = now + kProcessIntervalMs;
+
+ CpuOveruseMetrics current_metrics;
+ {
+ rtc::CritScope cs(&crit_);
+ ++num_process_times_;
+
+ current_metrics = metrics_;
+ if (num_process_times_ <= options_.min_process_count)
+ return 0;
+ }
+
+ if (IsOverusing(current_metrics)) {
+ // If we just ramped up and now have to back down, check whether the peak
+ // was short-lived. If so, back off to avoid oscillating around a load the
+ // system evidently cannot handle.
+ bool check_for_backoff = last_rampup_time_ > last_overuse_time_;
+ if (check_for_backoff) {
+ if (now - last_rampup_time_ < kStandardRampUpDelayMs ||
+ num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
+ // The ramp-up did not hold for long; back off.
+ current_rampup_delay_ms_ *= kRampUpBackoffFactor;
+ if (current_rampup_delay_ms_ > kMaxRampUpDelayMs)
+ current_rampup_delay_ms_ = kMaxRampUpDelayMs;
+ } else {
+ // Not currently backing off, reset rampup delay.
+ current_rampup_delay_ms_ = kStandardRampUpDelayMs;
+ }
+ }
+
+ last_overuse_time_ = now;
+ in_quick_rampup_ = false;
+ checks_above_threshold_ = 0;
+ ++num_overuse_detections_;
+
+ if (observer_ != NULL)
+ observer_->OveruseDetected();
+ } else if (IsUnderusing(current_metrics, now)) {
+ last_rampup_time_ = now;
+ in_quick_rampup_ = true;
+
+ if (observer_ != NULL)
+ observer_->NormalUsage();
+ }
+
+ int rampup_delay =
+ in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+
+ LOG(LS_VERBOSE) << " Frame stats: "
+ << " encode usage " << current_metrics.encode_usage_percent
+ << " overuse detections " << num_overuse_detections_
+ << " rampup delay " << rampup_delay;
+
+ return 0;
+}
+
+bool OveruseFrameDetector::IsOverusing(const CpuOveruseMetrics& metrics) {
+ if (metrics.encode_usage_percent >=
+ options_.high_encode_usage_threshold_percent) {
+ ++checks_above_threshold_;
+ } else {
+ checks_above_threshold_ = 0;
+ }
+ return checks_above_threshold_ >= options_.high_threshold_consecutive_count;
+}
+
+bool OveruseFrameDetector::IsUnderusing(const CpuOveruseMetrics& metrics,
+ int64_t time_now) {
+ int delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+ if (time_now < last_rampup_time_ + delay)
+ return false;
+
+ return metrics.encode_usage_percent <
+ options_.low_encode_usage_threshold_percent;
+}
+} // namespace webrtc
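
The usage value computed above is simply the filtered per-frame processing time divided by the filtered inter-frame interval, expressed in percent. A standalone sketch of the steady-state arithmetic, without the exponential filtering:

    #include <cstdio>

    int main() {
      // Steady state: one frame every 33 ms, 5 ms of send-side processing
      // per frame.
      const float frame_diff_ms = 33.0f;
      const float processing_ms = 5.0f;
      const float usage_percent = 100.0f * processing_ms / frame_diff_ms;
      std::printf("encode usage: %.0f%%\n", usage_percent);  // ~15%.
      return 0;
    }

This matches the expectation in the ProcessingUsage unit test further below.
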
diff --git a/webrtc/video/overuse_frame_detector.h b/webrtc/video/overuse_frame_detector.h
new file mode 100644
index 0000000000..d2606c19e6
--- /dev/null
+++ b/webrtc/video/overuse_frame_detector.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
+#define WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/include/module.h"
+
+namespace webrtc {
+
+class Clock;
+
+// CpuOveruseObserver is called when a system overuse is detected, i.e. when
+// VideoEngine cannot keep up with the encoding frequency.
+class CpuOveruseObserver {
+ public:
+ // Called as soon as an overuse is detected.
+ virtual void OveruseDetected() = 0;
+ // Called periodically when the system is not overused any longer.
+ virtual void NormalUsage() = 0;
+
+ protected:
+ virtual ~CpuOveruseObserver() {}
+};
+
+struct CpuOveruseOptions {
+ CpuOveruseOptions()
+ : low_encode_usage_threshold_percent(55),
+ high_encode_usage_threshold_percent(85),
+ frame_timeout_interval_ms(1500),
+ min_frame_samples(120),
+ min_process_count(3),
+ high_threshold_consecutive_count(2) {}
+
+ int low_encode_usage_threshold_percent; // Threshold for triggering underuse.
+ int high_encode_usage_threshold_percent; // Threshold for triggering overuse.
+ // General settings.
+ int frame_timeout_interval_ms; // The maximum allowed interval between two
+ // frames before resetting estimations.
+ int min_frame_samples; // The minimum number of frames required.
+ int min_process_count; // The number of initial process times required before
+ // triggering an overuse/underuse.
+ int high_threshold_consecutive_count; // The number of consecutive checks
+ // above the high threshold before
+ // triggering an overuse.
+};
+
+struct CpuOveruseMetrics {
+ CpuOveruseMetrics() : encode_usage_percent(-1) {}
+
+ int encode_usage_percent; // Average encode time divided by the average time
+ // difference between incoming captured frames.
+};
+
+class CpuOveruseMetricsObserver {
+ public:
+ virtual ~CpuOveruseMetricsObserver() {}
+ virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
+};
+
+
+// Used to detect system overuse based on the send-side processing time of
+// incoming frames.
+class OveruseFrameDetector : public Module {
+ public:
+ OveruseFrameDetector(Clock* clock,
+ const CpuOveruseOptions& options,
+ CpuOveruseObserver* overuse_observer,
+ CpuOveruseMetricsObserver* metrics_observer);
+ ~OveruseFrameDetector();
+
+ // Called for each captured frame.
+ void FrameCaptured(int width, int height, int64_t capture_time_ms);
+
+ // Called for each sent frame.
+ void FrameSent(int64_t capture_time_ms);
+
+ // Only public for testing.
+ int LastProcessingTimeMs() const;
+ int FramesInQueue() const;
+
+ // Implements Module.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ private:
+ class SendProcessingUsage;
+ class FrameQueue;
+
+ void UpdateCpuOveruseMetrics() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // TODO(asapersson): This method is only used on one thread, so it shouldn't
+ // need a guard.
+ void AddProcessingTime(int elapsed_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Only called on the processing thread.
+ bool IsOverusing(const CpuOveruseMetrics& metrics);
+ bool IsUnderusing(const CpuOveruseMetrics& metrics, int64_t time_now);
+
+ bool FrameTimeoutDetected(int64_t now) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ bool FrameSizeChanged(int num_pixels) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ void ResetAll(int num_pixels) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Protects all members except those that are const or only accessed on the
+ // processing thread.
+ // TODO(asapersson): See if we can reduce locking. As is, video frame
+ // processing contends with reading stats and the processing thread.
+ mutable rtc::CriticalSection crit_;
+
+ const CpuOveruseOptions options_;
+
+ // Observer getting overuse reports.
+ CpuOveruseObserver* const observer_;
+
+ // Stats metrics.
+ CpuOveruseMetricsObserver* const metrics_observer_;
+ CpuOveruseMetrics metrics_ GUARDED_BY(crit_);
+
+ Clock* const clock_;
+ int64_t num_process_times_ GUARDED_BY(crit_);
+
+ int64_t last_capture_time_ GUARDED_BY(crit_);
+
+ // Number of pixels of last captured frame.
+ int num_pixels_ GUARDED_BY(crit_);
+
+ // These seven members are only accessed on the processing thread.
+ int64_t next_process_time_;
+ int64_t last_overuse_time_;
+ int checks_above_threshold_;
+ int num_overuse_detections_;
+ int64_t last_rampup_time_;
+ bool in_quick_rampup_;
+ int current_rampup_delay_ms_;
+
+ int64_t last_sample_time_ms_; // Only accessed by one thread.
+
+ // TODO(asapersson): Can these be regular members (avoid separate heap
+ // allocs)?
+ const rtc::scoped_ptr<SendProcessingUsage> usage_ GUARDED_BY(crit_);
+ const rtc::scoped_ptr<FrameQueue> frame_queue_ GUARDED_BY(crit_);
+
+ rtc::ThreadChecker processing_thread_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
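
A minimal wiring sketch for the interfaces declared above; AdaptingObserver, MetricsLogger, and WireUpDetector (including the adaptation policy hinted at in the comments) are illustrative, not part of WebRTC:

    #include "webrtc/system_wrappers/include/clock.h"
    #include "webrtc/video/overuse_frame_detector.h"

    class AdaptingObserver : public webrtc::CpuOveruseObserver {
     public:
      void OveruseDetected() override { /* Step capture resolution down. */ }
      void NormalUsage() override { /* Step capture resolution back up. */ }
    };

    class MetricsLogger : public webrtc::CpuOveruseMetricsObserver {
     public:
      void CpuOveruseMetricsUpdated(
          const webrtc::CpuOveruseMetrics& metrics) override {
        // metrics.encode_usage_percent starts at the midpoint of the two
        // thresholds (70% with the defaults above) until samples arrive.
      }
    };

    void WireUpDetector() {  // Hypothetical setup function.
      static webrtc::CpuOveruseOptions options;  // Defaults: 55% / 85%.
      static AdaptingObserver observer;
      static MetricsLogger metrics_logger;  // Must not be null.
      static webrtc::OveruseFrameDetector detector(
          webrtc::Clock::GetRealTimeClock(), options, &observer,
          &metrics_logger);
      // Register 'detector' with a ProcessThread so Process() runs
      // periodically, and call FrameCaptured()/FrameSent() from the
      // capture/encode path.
    }
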
diff --git a/webrtc/video/overuse_frame_detector_unittest.cc b/webrtc/video/overuse_frame_detector_unittest.cc
new file mode 100644
index 0000000000..65e006b485
--- /dev/null
+++ b/webrtc/video/overuse_frame_detector_unittest.cc
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/overuse_frame_detector.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace {
+ const int kWidth = 640;
+ const int kHeight = 480;
+ const int kFrameInterval33ms = 33;
+ const int kProcessIntervalMs = 5000;
+ const int kProcessTime5ms = 5;
+} // namespace
+
+class MockCpuOveruseObserver : public CpuOveruseObserver {
+ public:
+ MockCpuOveruseObserver() {}
+ virtual ~MockCpuOveruseObserver() {}
+
+ MOCK_METHOD0(OveruseDetected, void());
+ MOCK_METHOD0(NormalUsage, void());
+};
+
+class CpuOveruseObserverImpl : public CpuOveruseObserver {
+ public:
+ CpuOveruseObserverImpl() :
+ overuse_(0),
+ normaluse_(0) {}
+ virtual ~CpuOveruseObserverImpl() {}
+
+ void OveruseDetected() { ++overuse_; }
+ void NormalUsage() { ++normaluse_; }
+
+ int overuse_;
+ int normaluse_;
+};
+
+class OveruseFrameDetectorTest : public ::testing::Test,
+ public CpuOveruseMetricsObserver {
+ protected:
+ virtual void SetUp() {
+ clock_.reset(new SimulatedClock(1234));
+ observer_.reset(new MockCpuOveruseObserver());
+ options_.min_process_count = 0;
+ ReinitializeOveruseDetector();
+ }
+
+ void ReinitializeOveruseDetector() {
+ overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+ observer_.get(), this));
+ }
+
+ void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
+ metrics_ = metrics;
+ }
+
+ int InitialUsage() {
+ return ((options_.low_encode_usage_threshold_percent +
+ options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
+ }
+
+ void InsertAndSendFramesWithInterval(
+ int num_frames, int interval_ms, int width, int height, int delay_ms) {
+ while (num_frames-- > 0) {
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
+ overuse_detector_->FrameCaptured(width, height, capture_time_ms);
+ clock_->AdvanceTimeMilliseconds(delay_ms);
+ overuse_detector_->FrameSent(capture_time_ms);
+ clock_->AdvanceTimeMilliseconds(interval_ms - delay_ms);
+ }
+ }
+
+ void TriggerOveruse(int num_times) {
+ const int kDelayMs = 32;
+ for (int i = 0; i < num_times; ++i) {
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kDelayMs);
+ overuse_detector_->Process();
+ }
+ }
+
+ void TriggerUnderuse() {
+ const int kDelayMs1 = 5;
+ const int kDelayMs2 = 6;
+ InsertAndSendFramesWithInterval(
+ 1300, kFrameInterval33ms, kWidth, kHeight, kDelayMs1);
+ InsertAndSendFramesWithInterval(
+ 1, kFrameInterval33ms, kWidth, kHeight, kDelayMs2);
+ overuse_detector_->Process();
+ }
+
+ int UsagePercent() { return metrics_.encode_usage_percent; }
+
+ CpuOveruseOptions options_;
+ rtc::scoped_ptr<SimulatedClock> clock_;
+ rtc::scoped_ptr<MockCpuOveruseObserver> observer_;
+ rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
+ CpuOveruseMetrics metrics_;
+};
+
+
+// UsagePercent() > high_encode_usage_threshold_percent => overuse.
+// UsagePercent() < low_encode_usage_threshold_percent => underuse.
+TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ // usage < low => underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
+ overuse_detector_.reset(
+ new OveruseFrameDetector(clock_.get(), options_, nullptr, this));
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(2);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) {
+ options_.min_process_count = 1;
+ CpuOveruseObserverImpl overuse_observer;
+ overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+ &overuse_observer, this));
+ InsertAndSendFramesWithInterval(
+ 1200, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ overuse_detector_->Process();
+ EXPECT_EQ(0, overuse_observer.normaluse_);
+ clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
+ overuse_detector_->Process();
+ EXPECT_EQ(1, overuse_observer.normaluse_);
+}
+
+TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(64);
+ for (size_t i = 0; i < 64; ++i) {
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ }
+}
+
+TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ options_.high_threshold_consecutive_count = 2;
+ ReinitializeOveruseDetector();
+ TriggerOveruse(2);
+}
+
+TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ options_.high_threshold_consecutive_count = 2;
+ ReinitializeOveruseDetector();
+ TriggerOveruse(1);
+}
+
+TEST_F(OveruseFrameDetectorTest, ProcessingUsage) {
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_EQ(kProcessTime5ms * 100 / kFrameInterval33ms, UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, ResetAfterResolutionChange) {
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+ // Verify reset.
+ InsertAndSendFramesWithInterval(
+ 1, kFrameInterval33ms, kWidth, kHeight + 1, kProcessTime5ms);
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, ResetAfterFrameTimeout) {
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 2, options_.frame_timeout_interval_ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+ // Verify reset.
+ InsertAndSendFramesWithInterval(
+ 2, options_.frame_timeout_interval_ms + 1, kWidth, kHeight,
+ kProcessTime5ms);
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdating) {
+ options_.min_frame_samples = 40;
+ ReinitializeOveruseDetector();
+ InsertAndSendFramesWithInterval(
+ 40, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 1, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) {
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrame) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+ overuse_detector_->FrameSent(33);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_TwoFrames) {
+ const int kProcessingTimeMs1 = 100;
+ const int kProcessingTimeMs2 = 50;
+ const int kTimeBetweenFramesMs = 200;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs1);
+ overuse_detector_->FrameSent(33);
+ EXPECT_EQ(kProcessingTimeMs1, overuse_detector_->LastProcessingTimeMs());
+ clock_->AdvanceTimeMilliseconds(kTimeBetweenFramesMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs2);
+ overuse_detector_->FrameSent(66);
+ EXPECT_EQ(kProcessingTimeMs2, overuse_detector_->LastProcessingTimeMs());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_MaxQueueSize) {
+ const int kMaxQueueSize = 91;
+ for (int i = 0; i < kMaxQueueSize * 2; ++i) {
+ overuse_detector_->FrameCaptured(kWidth, kHeight, i);
+ }
+ EXPECT_EQ(kMaxQueueSize, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_NonProcessedFramesRemoved) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 35);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 99);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(4, overuse_detector_->FramesInQueue());
+ overuse_detector_->FrameSent(66);
+ // Frames 33 and 35 are removed, 66 is processed, 99 is not yet processed.
+ EXPECT_EQ(2 * kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+ overuse_detector_->FrameSent(99);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_ResetClearsFrames) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ // Verify reset (resolution changed).
+ overuse_detector_->FrameCaptured(kWidth, kHeight + 1, 66);
+ EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameSent(66);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_NonMatchingSendFrameIgnored) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameSent(34);
+ EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+ overuse_detector_->FrameSent(33);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+}
+
+} // namespace webrtc
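
The TriggerOveruse and TriggerUnderuse helpers work because of where their per-frame delays land relative to the default thresholds; the arithmetic, spelled out standalone (not test code):

    #include <cassert>

    int main() {
      const int kIntervalMs = 33;
      const int kOveruseDelayMs = 32;   // TriggerOveruse's per-frame delay.
      const int kUnderuseDelayMs = 5;   // TriggerUnderuse's per-frame delay.
      // ~97% usage, above the 85% default overuse threshold.
      assert(100 * kOveruseDelayMs / kIntervalMs > 85);
      // ~15% usage, below the 55% default underuse threshold.
      assert(100 * kUnderuseDelayMs / kIntervalMs < 55);
      return 0;
    }
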
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
new file mode 100644
index 0000000000..177f2dd4e8
--- /dev/null
+++ b/webrtc/video/payload_router.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/payload_router.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+PayloadRouter::PayloadRouter()
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ active_(false) {}
+
+PayloadRouter::~PayloadRouter() {}
+
+size_t PayloadRouter::DefaultMaxPayloadLength() {
+ const size_t kIpUdpSrtpLength = 44;
+ return IP_PACKET_SIZE - kIpUdpSrtpLength;
+}
+
+void PayloadRouter::SetSendingRtpModules(
+ const std::list<RtpRtcp*>& rtp_modules) {
+ CriticalSectionScoped cs(crit_.get());
+ rtp_modules_.clear();
+ rtp_modules_.reserve(rtp_modules.size());
+ for (auto* rtp_module : rtp_modules) {
+ rtp_modules_.push_back(rtp_module);
+ }
+}
+
+void PayloadRouter::set_active(bool active) {
+ CriticalSectionScoped cs(crit_.get());
+ active_ = active;
+}
+
+bool PayloadRouter::active() {
+ CriticalSectionScoped cs(crit_.get());
+ return active_ && !rtp_modules_.empty();
+}
+
+bool PayloadRouter::RoutePayload(FrameType frame_type,
+ int8_t payload_type,
+ uint32_t time_stamp,
+ int64_t capture_time_ms,
+ const uint8_t* payload_data,
+ size_t payload_length,
+ const RTPFragmentationHeader* fragmentation,
+ const RTPVideoHeader* rtp_video_hdr) {
+ CriticalSectionScoped cs(crit_.get());
+ if (!active_ || rtp_modules_.empty())
+ return false;
+
+ // The simulcast index might actually be larger than the number of modules in
+ // case the encoder was processing a frame during a codec reconfig.
+ if (rtp_video_hdr != NULL &&
+ rtp_video_hdr->simulcastIdx >= rtp_modules_.size())
+ return false;
+
+ int stream_idx = 0;
+ if (rtp_video_hdr != NULL)
+ stream_idx = rtp_video_hdr->simulcastIdx;
+ return rtp_modules_[stream_idx]->SendOutgoingData(
+ frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
+ payload_length, fragmentation, rtp_video_hdr) == 0;
+}
+
+void PayloadRouter::SetTargetSendBitrates(
+ const std::vector<uint32_t>& stream_bitrates) {
+ CriticalSectionScoped cs(crit_.get());
+ if (stream_bitrates.size() < rtp_modules_.size()) {
+ // There can be a size mismatch during codec reconfiguration.
+ return;
+ }
+ int idx = 0;
+ for (auto* rtp_module : rtp_modules_) {
+ rtp_module->SetTargetSendBitrate(stream_bitrates[idx++]);
+ }
+}
+
+size_t PayloadRouter::MaxPayloadLength() const {
+ size_t min_payload_length = DefaultMaxPayloadLength();
+ CriticalSectionScoped cs(crit_.get());
+ for (auto* rtp_module : rtp_modules_) {
+ size_t module_payload_length = rtp_module->MaxDataPayloadLength();
+ if (module_payload_length < min_payload_length)
+ min_payload_length = module_payload_length;
+ }
+ return min_payload_length;
+}
+
+} // namespace webrtc
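
The 44-byte overhead subtracted in DefaultMaxPayloadLength() matches the per-header breakdown asserted in the unit test further below; spelled out (attributing the final 4 bytes to an SRTP auth tag is an assumption):

    // Assuming an Ethernet-sized IP packet (IP_PACKET_SIZE == 1500):
    //   1500 - 20 (IPv4) - 8 (UDP) - 12 (RTP) - 4 (SRTP auth tag) = 1456.
    static_assert(1500 - 20 - 8 - 12 - 4 == 1456,
                  "default max payload length");
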
diff --git a/webrtc/video/payload_router.h b/webrtc/video/payload_router.h
new file mode 100644
index 0000000000..881145976d
--- /dev/null
+++ b/webrtc/video/payload_router.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_PAYLOAD_ROUTER_H_
+#define WEBRTC_VIDEO_PAYLOAD_ROUTER_H_
+
+#include <list>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/include/atomic32.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RTPFragmentationHeader;
+class RtpRtcp;
+struct RTPVideoHeader;
+
+// PayloadRouter routes outgoing data to the correct sending RTP module, based
+// on the simulcast layer in RTPVideoHeader.
+class PayloadRouter {
+ public:
+ PayloadRouter();
+ ~PayloadRouter();
+
+ static size_t DefaultMaxPayloadLength();
+
+ // Rtp modules are assumed to be sorted in simulcast index order.
+ void SetSendingRtpModules(const std::list<RtpRtcp*>& rtp_modules);
+
+ // PayloadRouter only routes packets while active; otherwise all packets
+ // are dropped.
+ void set_active(bool active);
+ bool active();
+
+ // Input parameters according to the signature of RtpRtcp::SendOutgoingData.
+ // Returns true if the packet was routed / sent, false otherwise.
+ bool RoutePayload(FrameType frame_type,
+ int8_t payload_type,
+ uint32_t time_stamp,
+ int64_t capture_time_ms,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* fragmentation,
+ const RTPVideoHeader* rtp_video_hdr);
+
+ // Configures current target bitrate per module. 'stream_bitrates' is assumed
+ // to be in the same order as 'SetSendingRtpModules'.
+ void SetTargetSendBitrates(const std::vector<uint32_t>& stream_bitrates);
+
+ // Returns the maximum allowed data payload length, given the configured MTU
+ // and RTP headers.
+ size_t MaxPayloadLength() const;
+
+ void AddRef() { ++ref_count_; }
+ void Release() { if (--ref_count_ == 0) { delete this; } }
+
+ private:
+ // TODO(mflodman): When the new video API has launched, remove crit_ and
+ // assume rtp_modules_ will never change during a call.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Active sending RTP modules, in layer order.
+ std::vector<RtpRtcp*> rtp_modules_ GUARDED_BY(crit_.get());
+ bool active_ GUARDED_BY(crit_.get());
+
+ Atomic32 ref_count_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(PayloadRouter);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_PAYLOAD_ROUTER_H_
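
A usage sketch for the interface above; module construction is elided, and RouteOneFrame, rtp_low/rtp_high, and the literal payload parameters are illustrative:

    #include <list>

    #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
    #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
    #include "webrtc/video/payload_router.h"

    void RouteOneFrame(webrtc::RtpRtcp* rtp_low, webrtc::RtpRtcp* rtp_high,
                       const uint8_t* payload_data, size_t payload_size) {
      std::list<webrtc::RtpRtcp*> modules;
      modules.push_back(rtp_low);   // Simulcast index 0.
      modules.push_back(rtp_high);  // Simulcast index 1.

      webrtc::PayloadRouter router;
      router.SetSendingRtpModules(modules);
      router.set_active(true);  // Until active, RoutePayload() drops packets.

      webrtc::RTPVideoHeader header;
      header.simulcastIdx = 1;  // Routes to rtp_high, the second module.
      bool sent = router.RoutePayload(
          webrtc::kVideoFrameKey, 96 /* payload type */, 0 /* timestamp */,
          0 /* capture time ms */, payload_data, payload_size,
          nullptr /* no fragmentation */, &header);
      (void)sent;  // False if inactive or the simulcast index is out of range.
    }
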
diff --git a/webrtc/video/payload_router_unittest.cc b/webrtc/video/payload_router_unittest.cc
new file mode 100644
index 0000000000..8c22f2fd5c
--- /dev/null
+++ b/webrtc/video/payload_router_unittest.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <list>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/video/payload_router.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+namespace webrtc {
+
+class PayloadRouterTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ payload_router_.reset(new PayloadRouter());
+ }
+ rtc::scoped_ptr<PayloadRouter> payload_router_;
+};
+
+TEST_F(PayloadRouterTest, SendOnOneModule) {
+ MockRtpRtcp rtp;
+ std::list<RtpRtcp*> modules(1, &rtp);
+
+ payload_router_->SetSendingRtpModules(modules);
+
+ uint8_t payload = 'a';
+ FrameType frame_type = kVideoFrameKey;
+ int8_t payload_type = 96;
+
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(1);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ payload_router_->set_active(false);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(1);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ modules.clear();
+ payload_router_->SetSendingRtpModules(modules);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+}
+
+TEST_F(PayloadRouterTest, SendSimulcast) {
+ MockRtpRtcp rtp_1;
+ MockRtpRtcp rtp_2;
+ std::list<RtpRtcp*> modules;
+ modules.push_back(&rtp_1);
+ modules.push_back(&rtp_2);
+
+ payload_router_->SetSendingRtpModules(modules);
+
+ uint8_t payload_1 = 'a';
+ FrameType frame_type_1 = kVideoFrameKey;
+ int8_t payload_type_1 = 96;
+ RTPVideoHeader rtp_hdr_1;
+ rtp_hdr_1.simulcastIdx = 0;
+
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp_1, SendOutgoingData(frame_type_1, payload_type_1, 0, 0, _, 1,
+ NULL, &rtp_hdr_1))
+ .Times(1);
+ EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+ &payload_1, 1, NULL, &rtp_hdr_1));
+
+ uint8_t payload_2 = 'b';
+ FrameType frame_type_2 = kVideoFrameDelta;
+ int8_t payload_type_2 = 97;
+ RTPVideoHeader rtp_hdr_2;
+ rtp_hdr_2.simulcastIdx = 1;
+ EXPECT_CALL(rtp_2, SendOutgoingData(frame_type_2, payload_type_2, 0, 0, _, 1,
+ NULL, &rtp_hdr_2))
+ .Times(1);
+ EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
+ &payload_2, 1, NULL, &rtp_hdr_2));
+
+ // Inactive.
+ payload_router_->set_active(false);
+ EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+ &payload_1, 1, NULL, &rtp_hdr_1));
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
+ &payload_2, 1, NULL, &rtp_hdr_2));
+
+ // Invalid simulcast index.
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ rtp_hdr_1.simulcastIdx = 2;
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+ &payload_1, 1, NULL, &rtp_hdr_1));
+}
+
+TEST_F(PayloadRouterTest, MaxPayloadLength) {
+ // Without any limitations from the modules, verify we get the max payload
+ // length for IP/UDP/SRTP with an MTU of 1500 bytes.
+ const size_t kDefaultMaxLength = 1500 - 20 - 8 - 12 - 4;
+ EXPECT_EQ(kDefaultMaxLength, payload_router_->DefaultMaxPayloadLength());
+ EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+
+ MockRtpRtcp rtp_1;
+ MockRtpRtcp rtp_2;
+ std::list<RtpRtcp*> modules;
+ modules.push_back(&rtp_1);
+ modules.push_back(&rtp_2);
+ payload_router_->SetSendingRtpModules(modules);
+
+ // Modules return a higher length than the default value.
+ EXPECT_CALL(rtp_1, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kDefaultMaxLength + 10));
+ EXPECT_CALL(rtp_2, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kDefaultMaxLength + 10));
+ EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+
+ // The modules return a value lower than default.
+ const size_t kTestMinPayloadLength = 1001;
+ EXPECT_CALL(rtp_1, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kTestMinPayloadLength + 10));
+ EXPECT_CALL(rtp_2, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kTestMinPayloadLength));
+ EXPECT_EQ(kTestMinPayloadLength, payload_router_->MaxPayloadLength());
+}
+
+TEST_F(PayloadRouterTest, SetTargetSendBitrates) {
+ MockRtpRtcp rtp_1;
+ MockRtpRtcp rtp_2;
+ std::list<RtpRtcp*> modules;
+ modules.push_back(&rtp_1);
+ modules.push_back(&rtp_2);
+ payload_router_->SetSendingRtpModules(modules);
+
+ const uint32_t bitrate_1 = 10000;
+ const uint32_t bitrate_2 = 76543;
+ std::vector<uint32_t> bitrates(2, bitrate_1);
+ bitrates[1] = bitrate_2;
+ EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+ .Times(1);
+ EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+ .Times(1);
+ payload_router_->SetTargetSendBitrates(bitrates);
+
+ bitrates.resize(1);
+ EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+ .Times(0);
+ EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+ .Times(0);
+ payload_router_->SetTargetSendBitrates(bitrates);
+
+ bitrates.resize(3);
+ bitrates[1] = bitrate_2;
+ bitrates[2] = bitrate_1 + bitrate_2;
+ EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+ .Times(1);
+ EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+ .Times(1);
+ payload_router_->SetTargetSendBitrates(bitrates);
+}
+} // namespace webrtc
diff --git a/webrtc/video/rampup_tests.cc b/webrtc/video/rampup_tests.cc
deleted file mode 100644
index 70efe3b9ed..0000000000
--- a/webrtc/video/rampup_tests.cc
+++ /dev/null
@@ -1,509 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/checks.h"
-#include "webrtc/base/common.h"
-#include "webrtc/base/event.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
-#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
-#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
-#include "webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/test/testsupport/perf_test.h"
-#include "webrtc/video/rampup_tests.h"
-
-namespace webrtc {
-namespace {
-
-static const int64_t kPollIntervalMs = 20;
-
-std::vector<uint32_t> GenerateSsrcs(size_t num_streams,
- uint32_t ssrc_offset) {
- std::vector<uint32_t> ssrcs;
- for (size_t i = 0; i != num_streams; ++i)
- ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
- return ssrcs;
-}
-} // namespace
-
-RampUpTester::RampUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red)
- : EndToEndTest(test::CallTest::kLongTimeoutMs),
- event_(false, false),
- clock_(Clock::GetRealTimeClock()),
- num_streams_(num_streams),
- rtx_(rtx),
- red_(red),
- send_stream_(nullptr),
- start_bitrate_bps_(start_bitrate_bps),
- start_bitrate_verified_(false),
- expected_bitrate_bps_(0),
- test_start_ms_(-1),
- ramp_up_finished_ms_(-1),
- extension_type_(extension_type),
- ssrcs_(GenerateSsrcs(num_streams, 100)),
- rtx_ssrcs_(GenerateSsrcs(num_streams, 200)),
- poller_thread_(ThreadWrapper::CreateThread(&BitrateStatsPollingThread,
- this,
- "BitrateStatsPollingThread")),
- sender_call_(nullptr) {
- if (rtx_) {
- for (size_t i = 0; i < ssrcs_.size(); ++i)
- rtx_ssrc_map_[rtx_ssrcs_[i]] = ssrcs_[i];
- }
-}
-
-RampUpTester::~RampUpTester() {
- event_.Set();
-}
-
-Call::Config RampUpTester::GetSenderCallConfig() {
- Call::Config call_config;
- if (start_bitrate_bps_ != 0) {
- call_config.bitrate_config.start_bitrate_bps = start_bitrate_bps_;
- }
- call_config.bitrate_config.min_bitrate_bps = 10000;
- return call_config;
-}
-
-void RampUpTester::OnStreamsCreated(
- VideoSendStream* send_stream,
- const std::vector<VideoReceiveStream*>& receive_streams) {
- send_stream_ = send_stream;
-}
-
-void RampUpTester::OnTransportsCreated(
- test::PacketTransport* send_transport,
- test::PacketTransport* receive_transport) {
- send_transport_ = send_transport;
- send_transport_->SetConfig(forward_transport_config_);
-}
-
-size_t RampUpTester::GetNumStreams() const {
- return num_streams_;
-}
-
-void RampUpTester::ModifyConfigs(
- VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) {
- send_config->suspend_below_min_bitrate = true;
-
- if (num_streams_ == 1) {
- encoder_config->streams[0].target_bitrate_bps =
- encoder_config->streams[0].max_bitrate_bps = 2000000;
- // For a single stream, ramp up until 1 Mbps.
- expected_bitrate_bps_ = kSingleStreamTargetBps;
- } else {
- // For multi-stream, ramp up until all streams are being sent. That means
- // enough bitrate to send all the target streams plus the min bitrate of
- // the last one.
- expected_bitrate_bps_ = encoder_config->streams.back().min_bitrate_bps;
- for (size_t i = 0; i < encoder_config->streams.size() - 1; ++i) {
- expected_bitrate_bps_ += encoder_config->streams[i].target_bitrate_bps;
- }
- }
-
- send_config->rtp.extensions.clear();
-
- bool remb;
- if (extension_type_ == RtpExtension::kAbsSendTime) {
- remb = true;
- send_config->rtp.extensions.push_back(
- RtpExtension(extension_type_.c_str(), kAbsSendTimeExtensionId));
- } else if (extension_type_ == RtpExtension::kTransportSequenceNumber) {
- remb = false;
- send_config->rtp.extensions.push_back(RtpExtension(
- extension_type_.c_str(), kTransportSequenceNumberExtensionId));
- } else {
- remb = true;
- send_config->rtp.extensions.push_back(RtpExtension(
- extension_type_.c_str(), kTransmissionTimeOffsetExtensionId));
- }
-
- send_config->rtp.nack.rtp_history_ms = test::CallTest::kNackRtpHistoryMs;
- send_config->rtp.ssrcs = ssrcs_;
- if (rtx_) {
- send_config->rtp.rtx.payload_type = test::CallTest::kSendRtxPayloadType;
- send_config->rtp.rtx.ssrcs = rtx_ssrcs_;
- }
- if (red_) {
- send_config->rtp.fec.ulpfec_payload_type =
- test::CallTest::kUlpfecPayloadType;
- send_config->rtp.fec.red_payload_type = test::CallTest::kRedPayloadType;
- }
-
- size_t i = 0;
- for (VideoReceiveStream::Config& recv_config : *receive_configs) {
- recv_config.rtp.remb = remb;
- recv_config.rtp.extensions = send_config->rtp.extensions;
-
- recv_config.rtp.remote_ssrc = ssrcs_[i];
- recv_config.rtp.nack.rtp_history_ms = send_config->rtp.nack.rtp_history_ms;
-
- if (red_) {
- recv_config.rtp.fec.red_payload_type =
- send_config->rtp.fec.red_payload_type;
- recv_config.rtp.fec.ulpfec_payload_type =
- send_config->rtp.fec.ulpfec_payload_type;
- }
-
- if (rtx_) {
- recv_config.rtp.rtx[send_config->encoder_settings.payload_type].ssrc =
- rtx_ssrcs_[i];
- recv_config.rtp.rtx[send_config->encoder_settings.payload_type]
- .payload_type = send_config->rtp.rtx.payload_type;
- }
- ++i;
- }
-}
-
-void RampUpTester::OnCallsCreated(Call* sender_call, Call* receiver_call) {
- sender_call_ = sender_call;
-}
-
-bool RampUpTester::BitrateStatsPollingThread(void* obj) {
- return static_cast<RampUpTester*>(obj)->PollStats();
-}
-
-bool RampUpTester::PollStats() {
- if (sender_call_) {
- Call::Stats stats = sender_call_->GetStats();
-
- RTC_DCHECK_GT(expected_bitrate_bps_, 0);
- if (!start_bitrate_verified_ && start_bitrate_bps_ != 0) {
- // For tests with an explicitly set start bitrate, verify the first
- // bitrate estimate is close to the start bitrate and lower than the
- // test target bitrate. This is to verify a call respects the configured
- // start bitrate, but due to the BWE implementation we can't guarantee the
- // first estimate really is as high as the start bitrate.
- EXPECT_GT(stats.send_bandwidth_bps, 0.9 * start_bitrate_bps_);
- start_bitrate_verified_ = true;
- }
- if (stats.send_bandwidth_bps >= expected_bitrate_bps_) {
- ramp_up_finished_ms_ = clock_->TimeInMilliseconds();
- observation_complete_->Set();
- }
- }
-
- return !event_.Wait(kPollIntervalMs);
-}
-
-void RampUpTester::ReportResult(const std::string& measurement,
- size_t value,
- const std::string& units) const {
- webrtc::test::PrintResult(
- measurement, "",
- ::testing::UnitTest::GetInstance()->current_test_info()->name(),
- value, units, false);
-}
-
-void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream,
- size_t* total_packets_sent,
- size_t* total_sent,
- size_t* padding_sent,
- size_t* media_sent) const {
- *total_packets_sent += stream.rtp_stats.transmitted.packets +
- stream.rtp_stats.retransmitted.packets +
- stream.rtp_stats.fec.packets;
- *total_sent += stream.rtp_stats.transmitted.TotalBytes() +
- stream.rtp_stats.retransmitted.TotalBytes() +
- stream.rtp_stats.fec.TotalBytes();
- *padding_sent += stream.rtp_stats.transmitted.padding_bytes +
- stream.rtp_stats.retransmitted.padding_bytes +
- stream.rtp_stats.fec.padding_bytes;
- *media_sent += stream.rtp_stats.MediaPayloadBytes();
-}
-
-void RampUpTester::TriggerTestDone() {
- RTC_DCHECK_GE(test_start_ms_, 0);
-
- VideoSendStream::Stats send_stats = send_stream_->GetStats();
-
- size_t total_packets_sent = 0;
- size_t total_sent = 0;
- size_t padding_sent = 0;
- size_t media_sent = 0;
- for (uint32_t ssrc : ssrcs_) {
- AccumulateStats(send_stats.substreams[ssrc], &total_packets_sent,
- &total_sent, &padding_sent, &media_sent);
- }
-
- size_t rtx_total_packets_sent = 0;
- size_t rtx_total_sent = 0;
- size_t rtx_padding_sent = 0;
- size_t rtx_media_sent = 0;
- for (uint32_t rtx_ssrc : rtx_ssrcs_) {
- AccumulateStats(send_stats.substreams[rtx_ssrc], &rtx_total_packets_sent,
- &rtx_total_sent, &rtx_padding_sent, &rtx_media_sent);
- }
-
- ReportResult("ramp-up-total-packets-sent", total_packets_sent, "packets");
- ReportResult("ramp-up-total-sent", total_sent, "bytes");
- ReportResult("ramp-up-media-sent", media_sent, "bytes");
- ReportResult("ramp-up-padding-sent", padding_sent, "bytes");
- ReportResult("ramp-up-rtx-total-packets-sent", rtx_total_packets_sent,
- "packets");
- ReportResult("ramp-up-rtx-total-sent", rtx_total_sent, "bytes");
- ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, "bytes");
- ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, "bytes");
- if (ramp_up_finished_ms_ >= 0) {
- ReportResult("ramp-up-time", ramp_up_finished_ms_ - test_start_ms_,
- "milliseconds");
- }
-}
-
-void RampUpTester::PerformTest() {
- test_start_ms_ = clock_->TimeInMilliseconds();
- poller_thread_->Start();
- if (Wait() != kEventSignaled) {
- printf("Timed out while waiting for ramp-up to complete.");
- return;
- }
- TriggerTestDone();
- poller_thread_->Stop();
-}
-
-RampUpDownUpTester::RampUpDownUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red)
- : RampUpTester(num_streams, start_bitrate_bps, extension_type, rtx, red),
- test_state_(kFirstRampup),
- state_start_ms_(clock_->TimeInMilliseconds()),
- interval_start_ms_(clock_->TimeInMilliseconds()),
- sent_bytes_(0) {
- forward_transport_config_.link_capacity_kbps =
- kHighBandwidthLimitBps / 1000;
-}
-
-RampUpDownUpTester::~RampUpDownUpTester() {}
-
-bool RampUpDownUpTester::PollStats() {
- if (send_stream_) {
- webrtc::VideoSendStream::Stats stats = send_stream_->GetStats();
- int transmit_bitrate_bps = 0;
- for (auto it : stats.substreams) {
- transmit_bitrate_bps += it.second.total_bitrate_bps;
- }
-
- EvolveTestState(transmit_bitrate_bps, stats.suspended);
- }
-
- return !event_.Wait(kPollIntervalMs);
-}
-
-Call::Config RampUpDownUpTester::GetReceiverCallConfig() {
- Call::Config config;
- config.bitrate_config.min_bitrate_bps = 10000;
- return config;
-}
-
-std::string RampUpDownUpTester::GetModifierString() const {
- std::string str("_");
- char temp_str[5];
- sprintf(temp_str, "%i", static_cast<int>(num_streams_));
- str += std::string(temp_str);
- str += "stream";
- str += (num_streams_ > 1 ? "s" : "");
- str += "_";
- str += (rtx_ ? "" : "no");
- str += "rtx";
- return str;
-}
-
-void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) {
- int64_t now = clock_->TimeInMilliseconds();
- switch (test_state_) {
- case kFirstRampup: {
- EXPECT_FALSE(suspended);
- if (bitrate_bps > kExpectedHighBitrateBps) {
- // The first ramp-up has reached the target bitrate. Change the
- // channel limit, and move to the next test state.
- forward_transport_config_.link_capacity_kbps =
- kLowBandwidthLimitBps / 1000;
- send_transport_->SetConfig(forward_transport_config_);
- test_state_ = kLowRate;
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "first_rampup",
- now - state_start_ms_,
- "ms",
- false);
- state_start_ms_ = now;
- interval_start_ms_ = now;
- sent_bytes_ = 0;
- }
- break;
- }
- case kLowRate: {
- if (bitrate_bps < kExpectedLowBitrateBps && suspended) {
- // The ramp-down was successful. Change the channel limit back to a
- // high value, and move to the next test state.
- forward_transport_config_.link_capacity_kbps =
- kHighBandwidthLimitBps / 1000;
- send_transport_->SetConfig(forward_transport_config_);
- test_state_ = kSecondRampup;
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "rampdown",
- now - state_start_ms_,
- "ms",
- false);
- state_start_ms_ = now;
- interval_start_ms_ = now;
- sent_bytes_ = 0;
- }
- break;
- }
- case kSecondRampup: {
- if (bitrate_bps > kExpectedHighBitrateBps && !suspended) {
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "second_rampup",
- now - state_start_ms_,
- "ms",
- false);
- observation_complete_->Set();
- }
- break;
- }
- }
-}
-
-class RampUpTest : public test::CallTest {
- public:
- RampUpTest() {}
-
- virtual ~RampUpTest() {
- EXPECT_EQ(nullptr, send_stream_);
- EXPECT_TRUE(receive_streams_.empty());
- }
-};
-
-TEST_F(RampUpTest, SingleStream) {
- RampUpTester test(1, 0, RtpExtension::kTOffset, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, Simulcast) {
- RampUpTester test(3, 0, RtpExtension::kTOffset, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, SimulcastWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTOffset, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, SimulcastByRedWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTOffset, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, SingleStreamWithHighStartBitrate) {
- RampUpTester test(1, 0.9 * kSingleStreamTargetBps, RtpExtension::kTOffset,
- false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpOneStream) {
- RampUpDownUpTester test(1, 60000, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpThreeStreams) {
- RampUpDownUpTester test(3, 60000, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpOneStreamRtx) {
- RampUpDownUpTester test(1, 60000, RtpExtension::kAbsSendTime, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpThreeStreamsRtx) {
- RampUpDownUpTester test(3, 60000, RtpExtension::kAbsSendTime, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpOneStreamByRedRtx) {
- RampUpDownUpTester test(1, 60000, RtpExtension::kAbsSendTime, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpThreeStreamsByRedRtx) {
- RampUpDownUpTester test(3, 60000, RtpExtension::kAbsSendTime, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSingleStream) {
- RampUpTester test(1, 0, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSimulcast) {
- RampUpTester test(3, 0, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSimulcastWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kAbsSendTime, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSimulcastByRedWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kAbsSendTime, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSingleStreamWithHighStartBitrate) {
- RampUpTester test(1, 0.9 * kSingleStreamTargetBps, RtpExtension::kAbsSendTime,
- false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSingleStream) {
- RampUpTester test(1, 0, RtpExtension::kTransportSequenceNumber, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSimulcast) {
- RampUpTester test(3, 0, RtpExtension::kTransportSequenceNumber, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSimulcastWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTransportSequenceNumber, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSimulcastByRedWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTransportSequenceNumber, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSingleStreamWithHighStartBitrate) {
- RampUpTester test(1, 0.9 * kSingleStreamTargetBps,
- RtpExtension::kTransportSequenceNumber, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-} // namespace webrtc
diff --git a/webrtc/video/rampup_tests.h b/webrtc/video/rampup_tests.h
deleted file mode 100644
index ff65c8d0a0..0000000000
--- a/webrtc/video/rampup_tests.h
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_RAMPUP_TESTS_H_
-#define WEBRTC_VIDEO_RAMPUP_TESTS_H_
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/call.h"
-#include "webrtc/call/transport_adapter.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/test/call_test.h"
-
-namespace webrtc {
-
-static const int kTransmissionTimeOffsetExtensionId = 6;
-static const int kAbsSendTimeExtensionId = 7;
-static const int kTransportSequenceNumberExtensionId = 8;
-static const unsigned int kSingleStreamTargetBps = 1000000;
-
-class Clock;
-class PacketRouter;
-class ReceiveStatistics;
-class RtpHeaderParser;
-class RTPPayloadRegistry;
-class RtpRtcp;
-
-class RampUpTester : public test::EndToEndTest {
- public:
- RampUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red);
- ~RampUpTester() override;
-
- void PerformTest() override;
-
- protected:
- virtual bool PollStats();
-
- void AccumulateStats(const VideoSendStream::StreamStats& stream,
- size_t* total_packets_sent,
- size_t* total_sent,
- size_t* padding_sent,
- size_t* media_sent) const;
-
- void ReportResult(const std::string& measurement,
- size_t value,
- const std::string& units) const;
- void TriggerTestDone();
-
- rtc::Event event_;
- Clock* const clock_;
- FakeNetworkPipe::Config forward_transport_config_;
- const size_t num_streams_;
- const bool rtx_;
- const bool red_;
- VideoSendStream* send_stream_;
- test::PacketTransport* send_transport_;
-
- private:
- typedef std::map<uint32_t, uint32_t> SsrcMap;
-
- Call::Config GetSenderCallConfig() override;
- void OnStreamsCreated(
- VideoSendStream* send_stream,
- const std::vector<VideoReceiveStream*>& receive_streams) override;
- void OnTransportsCreated(test::PacketTransport* send_transport,
- test::PacketTransport* receive_transport) override;
- size_t GetNumStreams() const;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override;
- void OnCallsCreated(Call* sender_call, Call* receiver_call) override;
-
- static bool BitrateStatsPollingThread(void* obj);
-
- const int start_bitrate_bps_;
- bool start_bitrate_verified_;
- int expected_bitrate_bps_;
- int64_t test_start_ms_;
- int64_t ramp_up_finished_ms_;
-
- const std::string extension_type_;
- std::vector<uint32_t> ssrcs_;
- std::vector<uint32_t> rtx_ssrcs_;
- SsrcMap rtx_ssrc_map_;
-
- rtc::scoped_ptr<ThreadWrapper> poller_thread_;
- Call* sender_call_;
-};
-
-class RampUpDownUpTester : public RampUpTester {
- public:
- RampUpDownUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red);
- ~RampUpDownUpTester() override;
-
- protected:
- bool PollStats() override;
-
- private:
- static const int kHighBandwidthLimitBps = 80000;
- static const int kExpectedHighBitrateBps = 60000;
- static const int kLowBandwidthLimitBps = 20000;
- static const int kExpectedLowBitrateBps = 20000;
- enum TestStates { kFirstRampup, kLowRate, kSecondRampup };
-
- Call::Config GetReceiverCallConfig() override;
-
- std::string GetModifierString() const;
- void EvolveTestState(int bitrate_bps, bool suspended);
-
- TestStates test_state_;
- int64_t state_start_ms_;
- int64_t interval_start_ms_;
- int sent_bytes_;
-};
-} // namespace webrtc
-#endif // WEBRTC_VIDEO_RAMPUP_TESTS_H_
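Note: the deleted RampUpDownUpTester drove a three-state bandwidth cycle (kFirstRampup -> kLowRate -> kSecondRampup), toggling the fake network's link capacity at each transition. A minimal standalone sketch of that control flow, using the thresholds from the deleted header (illustrative, not part of the patch):

    enum class TestState { kFirstRampup, kLowRate, kSecondRampup, kDone };

    // Advance the state when the measured send bitrate crosses the expected
    // thresholds, mirroring RampUpDownUpTester::EvolveTestState().
    TestState Evolve(TestState state, int bitrate_bps, bool suspended) {
      const int kExpectedHighBitrateBps = 60000;
      const int kExpectedLowBitrateBps = 20000;
      switch (state) {
        case TestState::kFirstRampup:
          return bitrate_bps > kExpectedHighBitrateBps ? TestState::kLowRate
                                                       : state;
        case TestState::kLowRate:
          return (bitrate_bps < kExpectedLowBitrateBps && suspended)
                     ? TestState::kSecondRampup
                     : state;
        case TestState::kSecondRampup:
          return (bitrate_bps > kExpectedHighBitrateBps && !suspended)
                     ? TestState::kDone
                     : state;
        case TestState::kDone:
          return state;
      }
      return state;
    }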
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index eec2bc8301..d6ab4ff361 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -13,7 +13,7 @@
#include <cmath>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/metrics.h"
@@ -37,37 +37,39 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
void ReceiveStatisticsProxy::UpdateHistograms() {
int fraction_lost = report_block_stats_.FractionLostInPercent();
if (fraction_lost != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent",
- fraction_lost);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.ReceivedPacketsLostInPercent",
+ fraction_lost);
}
const int kMinRequiredSamples = 200;
int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount());
if (samples > kMinRequiredSamples) {
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond",
- static_cast<int>(render_fps_tracker_.ComputeTotalRate()));
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.RenderSqrtPixelsPerSecond",
- static_cast<int>(render_pixel_tracker_.ComputeTotalRate()));
+ RTC_HISTOGRAM_COUNTS_SPARSE_100("WebRTC.Video.RenderFramesPerSecond",
+ round(render_fps_tracker_.ComputeTotalRate()));
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Video.RenderSqrtPixelsPerSecond",
+ round(render_pixel_tracker_.ComputeTotalRate()));
}
int width = render_width_counter_.Avg(kMinRequiredSamples);
int height = render_height_counter_.Avg(kMinRequiredSamples);
if (width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000("WebRTC.Video.ReceivedWidthInPixels",
+ width);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000("WebRTC.Video.ReceivedHeightInPixels",
+ height);
}
int qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
if (qp != -1)
- RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", qp);
+ RTC_HISTOGRAM_COUNTS_SPARSE_200("WebRTC.Video.Decoded.Vp8.Qp", qp);
  // TODO(asapersson): DecoderTiming() is called periodically (every 1000 ms)
  // and not per frame. Change decode time to include every frame.
const int kMinRequiredDecodeSamples = 5;
int decode_ms = decode_time_counter_.Avg(kMinRequiredDecodeSamples);
if (decode_ms != -1)
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", decode_ms);
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000("WebRTC.Video.DecodeTimeInMs", decode_ms);
int delay_ms = delay_counter_.Avg(kMinRequiredDecodeSamples);
if (delay_ms != -1)
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
}
VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
@@ -80,6 +82,11 @@ void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
stats_.current_payload_type = payload_type;
}
+void ReceiveStatisticsProxy::OnDecoderImplementationName(
+ const char* implementation_name) {
+ rtc::CritScope lock(&crit_);
+ stats_.decoder_implementation_name = implementation_name;
+}
void ReceiveStatisticsProxy::OnIncomingRate(unsigned int framerate,
unsigned int bitrate_bps) {
rtc::CritScope lock(&crit_);
diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h
index b6741f9cde..87cb9506a9 100644
--- a/webrtc/video/receive_statistics_proxy.h
+++ b/webrtc/video/receive_statistics_proxy.h
@@ -19,9 +19,9 @@
#include "webrtc/common_types.h"
#include "webrtc/frame_callback.h"
#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/video_engine/report_block_stats.h"
-#include "webrtc/video_engine/vie_channel.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/video/report_block_stats.h"
+#include "webrtc/video/vie_channel.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_renderer.h"
@@ -45,6 +45,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
void OnDecodedFrame();
void OnRenderedFrame(int width, int height);
void OnIncomingPayloadType(int payload_type);
+ void OnDecoderImplementationName(const char* implementation_name);
void OnIncomingRate(unsigned int framerate, unsigned int bitrate_bps);
void OnDecoderTiming(int decode_ms,
int max_decode_ms,
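Note: the new OnDecoderImplementationName() hook lets the decode path surface which decoder is in use. A hypothetical call site (the helper, proxy pointer, and name string are illustrative; assumes the proxy header is included):

    // Invoked once the decoder identity is known; the name ends up in
    // VideoReceiveStream::Stats::decoder_implementation_name.
    void ReportDecoderName(ReceiveStatisticsProxy* proxy) {
      proxy->OnDecoderImplementationName("libvpx (VP8)");
    }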
diff --git a/webrtc/video/replay.cc b/webrtc/video/replay.cc
index f54909e55f..484924872b 100644
--- a/webrtc/video/replay.cc
+++ b/webrtc/video/replay.cc
@@ -20,7 +20,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/test/encoder_settings.h"
diff --git a/webrtc/video/report_block_stats.cc b/webrtc/video/report_block_stats.cc
new file mode 100644
index 0000000000..dee5662c3c
--- /dev/null
+++ b/webrtc/video/report_block_stats.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/report_block_stats.h"
+
+namespace webrtc {
+
+namespace {
+int FractionLost(uint32_t num_lost_sequence_numbers,
+ uint32_t num_sequence_numbers) {
+ if (num_sequence_numbers == 0) {
+ return 0;
+ }
+ return ((num_lost_sequence_numbers * 255) + (num_sequence_numbers / 2)) /
+ num_sequence_numbers;
+}
+} // namespace
+
+
+// Helper class for RTCP statistics.
+ReportBlockStats::ReportBlockStats()
+ : num_sequence_numbers_(0),
+ num_lost_sequence_numbers_(0) {
+}
+
+void ReportBlockStats::Store(const RtcpStatistics& rtcp_stats,
+ uint32_t remote_ssrc,
+ uint32_t source_ssrc) {
+ RTCPReportBlock block;
+ block.cumulativeLost = rtcp_stats.cumulative_lost;
+ block.fractionLost = rtcp_stats.fraction_lost;
+ block.extendedHighSeqNum = rtcp_stats.extended_max_sequence_number;
+ block.jitter = rtcp_stats.jitter;
+ block.remoteSSRC = remote_ssrc;
+ block.sourceSSRC = source_ssrc;
+ uint32_t num_sequence_numbers = 0;
+ uint32_t num_lost_sequence_numbers = 0;
+ StoreAndAddPacketIncrement(
+ block, &num_sequence_numbers, &num_lost_sequence_numbers);
+}
+
+RTCPReportBlock ReportBlockStats::AggregateAndStore(
+ const ReportBlockVector& report_blocks) {
+ RTCPReportBlock aggregate;
+ if (report_blocks.empty()) {
+ return aggregate;
+ }
+ uint32_t num_sequence_numbers = 0;
+ uint32_t num_lost_sequence_numbers = 0;
+ ReportBlockVector::const_iterator report_block = report_blocks.begin();
+ for (; report_block != report_blocks.end(); ++report_block) {
+ aggregate.cumulativeLost += report_block->cumulativeLost;
+ aggregate.jitter += report_block->jitter;
+ StoreAndAddPacketIncrement(*report_block,
+ &num_sequence_numbers,
+ &num_lost_sequence_numbers);
+ }
+
+ if (report_blocks.size() == 1) {
+ // No aggregation needed.
+ return report_blocks[0];
+ }
+ // Fraction lost since previous report block.
+ aggregate.fractionLost =
+ FractionLost(num_lost_sequence_numbers, num_sequence_numbers);
+ aggregate.jitter = static_cast<uint32_t>(
+ (aggregate.jitter + report_blocks.size() / 2) / report_blocks.size());
+ return aggregate;
+}
+
+void ReportBlockStats::StoreAndAddPacketIncrement(
+ const RTCPReportBlock& report_block,
+ uint32_t* num_sequence_numbers,
+ uint32_t* num_lost_sequence_numbers) {
+ // Get diff with previous report block.
+ ReportBlockMap::iterator prev_report_block = prev_report_blocks_.find(
+ report_block.sourceSSRC);
+ if (prev_report_block != prev_report_blocks_.end()) {
+ int seq_num_diff = report_block.extendedHighSeqNum -
+ prev_report_block->second.extendedHighSeqNum;
+ int cum_loss_diff = report_block.cumulativeLost -
+ prev_report_block->second.cumulativeLost;
+ if (seq_num_diff >= 0 && cum_loss_diff >= 0) {
+ *num_sequence_numbers += seq_num_diff;
+ *num_lost_sequence_numbers += cum_loss_diff;
+ // Update total number of packets/lost packets.
+ num_sequence_numbers_ += seq_num_diff;
+ num_lost_sequence_numbers_ += cum_loss_diff;
+ }
+ }
+ // Store current report block.
+ prev_report_blocks_[report_block.sourceSSRC] = report_block;
+}
+
+int ReportBlockStats::FractionLostInPercent() const {
+ if (num_sequence_numbers_ == 0) {
+ return -1;
+ }
+ return FractionLost(
+ num_lost_sequence_numbers_, num_sequence_numbers_) * 100 / 255;
+}
+
+} // namespace webrtc
+
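Note: the rounding in FractionLost() maps a loss ratio onto the RTCP 0-255 scale with half-unit rounding. A constexpr restatement (standalone, not part of the patch) makes the arithmetic checkable at compile time:

    #include <cstdint>

    constexpr int FractionLost255(uint32_t lost, uint32_t total) {
      return total == 0 ? 0 : ((lost * 255) + (total / 2)) / total;
    }

    // 5 lost out of 100: (5*255 + 50) / 100 = 13 on the 0-255 scale, and
    // 13 * 100 / 255 = 5, matching FractionLostInPercent()'s 5%.
    static_assert(FractionLost255(5, 100) == 13, "rounded 255-scale loss");
    static_assert(FractionLost255(5, 100) * 100 / 255 == 5, "percent conversion");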
diff --git a/webrtc/video/report_block_stats.h b/webrtc/video/report_block_stats.h
new file mode 100644
index 0000000000..c54e4677f4
--- /dev/null
+++ b/webrtc/video/report_block_stats.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_REPORT_BLOCK_STATS_H_
+#define WEBRTC_VIDEO_REPORT_BLOCK_STATS_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+// Helper class for RTCP statistics.
+class ReportBlockStats {
+ public:
+ typedef std::map<uint32_t, RTCPReportBlock> ReportBlockMap;
+ typedef std::vector<RTCPReportBlock> ReportBlockVector;
+ ReportBlockStats();
+ ~ReportBlockStats() {}
+
+ // Updates stats and stores report blocks.
+ // Returns an aggregate of the |report_blocks|.
+ RTCPReportBlock AggregateAndStore(const ReportBlockVector& report_blocks);
+
+ // Updates stats and stores report block.
+ void Store(const RtcpStatistics& rtcp_stats,
+ uint32_t remote_ssrc,
+ uint32_t source_ssrc);
+
+  // Returns the total fraction of lost packets (or -1 if fewer than two report
+  // blocks have been stored).
+ int FractionLostInPercent() const;
+
+ private:
+ // Updates the total number of packets/lost packets.
+ // Stores the report block.
+  // Adds the packet/lost-packet increments since the previous report block
+  // to the output parameters.
+ void StoreAndAddPacketIncrement(const RTCPReportBlock& report_block,
+ uint32_t* num_sequence_numbers,
+ uint32_t* num_lost_sequence_numbers);
+
+ // The total number of packets/lost packets.
+ uint32_t num_sequence_numbers_;
+ uint32_t num_lost_sequence_numbers_;
+
+ // Map holding the last stored report block (mapped by the source SSRC).
+ ReportBlockMap prev_report_blocks_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_REPORT_BLOCK_STATS_H_
+
diff --git a/webrtc/video/report_block_stats_unittest.cc b/webrtc/video/report_block_stats_unittest.cc
new file mode 100644
index 0000000000..5cde9004b1
--- /dev/null
+++ b/webrtc/video/report_block_stats_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/video/report_block_stats.h"
+
+namespace webrtc {
+
+class ReportBlockStatsTest : public ::testing::Test {
+ protected:
+ ReportBlockStatsTest() : kSsrc1(0x12345), kSsrc2(0x23456) {}
+
+ void SetUp() override {
+ // kSsrc1: block 1-3.
+ block1_1_.cumulativeLost = 10;
+ block1_1_.fractionLost = 123;
+ block1_1_.extendedHighSeqNum = 24000;
+ block1_1_.jitter = 777;
+ block1_1_.sourceSSRC = kSsrc1;
+ block1_2_.cumulativeLost = 15;
+ block1_2_.fractionLost = 0;
+ block1_2_.extendedHighSeqNum = 24100;
+ block1_2_.jitter = 222;
+ block1_2_.sourceSSRC = kSsrc1;
+ block1_3_.cumulativeLost = 50;
+ block1_3_.fractionLost = 0;
+ block1_3_.extendedHighSeqNum = 24200;
+ block1_3_.jitter = 333;
+ block1_3_.sourceSSRC = kSsrc1;
+ // kSsrc2: block 1,2.
+ block2_1_.cumulativeLost = 111;
+ block2_1_.fractionLost = 222;
+ block2_1_.extendedHighSeqNum = 8500;
+ block2_1_.jitter = 555;
+ block2_1_.sourceSSRC = kSsrc2;
+ block2_2_.cumulativeLost = 136;
+ block2_2_.fractionLost = 0;
+ block2_2_.extendedHighSeqNum = 8800;
+ block2_2_.jitter = 888;
+ block2_2_.sourceSSRC = kSsrc2;
+
+ ssrc1block1_.push_back(block1_1_);
+ ssrc1block2_.push_back(block1_2_);
+ ssrc12block1_.push_back(block1_1_);
+ ssrc12block1_.push_back(block2_1_);
+ ssrc12block2_.push_back(block1_2_);
+ ssrc12block2_.push_back(block2_2_);
+ }
+
+ RtcpStatistics RtcpReportBlockToRtcpStatistics(
+ const RTCPReportBlock& stats) {
+ RtcpStatistics block;
+ block.cumulative_lost = stats.cumulativeLost;
+ block.fraction_lost = stats.fractionLost;
+ block.extended_max_sequence_number = stats.extendedHighSeqNum;
+ block.jitter = stats.jitter;
+ return block;
+ }
+
+ const uint32_t kSsrc1;
+ const uint32_t kSsrc2;
+ RTCPReportBlock block1_1_;
+ RTCPReportBlock block1_2_;
+ RTCPReportBlock block1_3_;
+ RTCPReportBlock block2_1_;
+ RTCPReportBlock block2_2_;
+ std::vector<RTCPReportBlock> ssrc1block1_;
+ std::vector<RTCPReportBlock> ssrc1block2_;
+ std::vector<RTCPReportBlock> ssrc12block1_;
+ std::vector<RTCPReportBlock> ssrc12block2_;
+};
+
+TEST_F(ReportBlockStatsTest, AggregateAndStore_NoSsrc) {
+ ReportBlockStats stats;
+ std::vector<RTCPReportBlock> empty;
+ RTCPReportBlock aggregated = stats.AggregateAndStore(empty);
+ EXPECT_EQ(0U, aggregated.fractionLost);
+ EXPECT_EQ(0U, aggregated.cumulativeLost);
+ EXPECT_EQ(0U, aggregated.jitter);
+ EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+}
+
+TEST_F(ReportBlockStatsTest, AggregateAndStore_OneSsrc) {
+ ReportBlockStats stats;
+ RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc1block1_);
+ // One ssrc, no aggregation done.
+ EXPECT_EQ(123U, aggregated.fractionLost);
+ EXPECT_EQ(10U, aggregated.cumulativeLost);
+ EXPECT_EQ(777U, aggregated.jitter);
+ EXPECT_EQ(24000U, aggregated.extendedHighSeqNum);
+
+ aggregated = stats.AggregateAndStore(ssrc1block2_);
+ EXPECT_EQ(0U, aggregated.fractionLost);
+ EXPECT_EQ(15U, aggregated.cumulativeLost);
+ EXPECT_EQ(222U, aggregated.jitter);
+ EXPECT_EQ(24100U, aggregated.extendedHighSeqNum);
+
+ // fl: 100 * (15-10) / (24100-24000) = 5%
+ EXPECT_EQ(5, stats.FractionLostInPercent());
+}
+
+TEST_F(ReportBlockStatsTest, AggregateAndStore_TwoSsrcs) {
+ ReportBlockStats stats;
+ RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc12block1_);
+ EXPECT_EQ(0U, aggregated.fractionLost);
+ EXPECT_EQ(10U + 111U, aggregated.cumulativeLost);
+ EXPECT_EQ((777U + 555U) / 2, aggregated.jitter);
+ EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+
+ aggregated = stats.AggregateAndStore(ssrc12block2_);
+ // fl: 255 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 19
+ EXPECT_EQ(19U, aggregated.fractionLost);
+ EXPECT_EQ(15U + 136U, aggregated.cumulativeLost);
+ EXPECT_EQ((222U + 888U) / 2, aggregated.jitter);
+ EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+
+ // fl: 100 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 7%
+ EXPECT_EQ(7, stats.FractionLostInPercent());
+}
+
+TEST_F(ReportBlockStatsTest, StoreAndGetFractionLost) {
+ const uint32_t kRemoteSsrc = 1;
+ ReportBlockStats stats;
+ EXPECT_EQ(-1, stats.FractionLostInPercent());
+
+ // First block.
+ stats.Store(RtcpReportBlockToRtcpStatistics(block1_1_), kRemoteSsrc, kSsrc1);
+ EXPECT_EQ(-1, stats.FractionLostInPercent());
+ // fl: 100 * (15-10) / (24100-24000) = 5%
+ stats.Store(RtcpReportBlockToRtcpStatistics(block1_2_), kRemoteSsrc, kSsrc1);
+ EXPECT_EQ(5, stats.FractionLostInPercent());
+ // fl: 100 * (50-10) / (24200-24000) = 20%
+ stats.Store(RtcpReportBlockToRtcpStatistics(block1_3_), kRemoteSsrc, kSsrc1);
+ EXPECT_EQ(20, stats.FractionLostInPercent());
+}
+
+} // namespace webrtc
+
diff --git a/webrtc/video/screenshare_loopback.cc b/webrtc/video/screenshare_loopback.cc
index 9897783eb9..6479aa4ebb 100644
--- a/webrtc/video/screenshare_loopback.cc
+++ b/webrtc/video/screenshare_loopback.cc
@@ -20,6 +20,7 @@
namespace webrtc {
namespace flags {
+// Flags common with video loopback, with different default values.
DEFINE_int32(width, 1850, "Video width (crops source).");
size_t Width() {
return static_cast<size_t>(FLAGS_width);
@@ -35,21 +36,6 @@ int Fps() {
return static_cast<int>(FLAGS_fps);
}
-DEFINE_int32(slide_change_interval,
- 10,
- "Interval (in seconds) between simulated slide changes.");
-int SlideChangeInterval() {
- return static_cast<int>(FLAGS_slide_change_interval);
-}
-
-DEFINE_int32(
- scroll_duration,
- 0,
- "Duration (in seconds) during which a slide will be scrolled into place.");
-int ScrollDuration() {
- return static_cast<int>(FLAGS_scroll_duration);
-}
-
DEFINE_int32(min_bitrate, 50, "Call and stream min bitrate in kbps.");
int MinBitrateKbps() {
return static_cast<int>(FLAGS_min_bitrate);
@@ -71,26 +57,41 @@ int MaxBitrateKbps() {
}
DEFINE_int32(num_temporal_layers, 2, "Number of temporal layers to use.");
-size_t NumTemporalLayers() {
- return static_cast<size_t>(FLAGS_num_temporal_layers);
+int NumTemporalLayers() {
+ return static_cast<int>(FLAGS_num_temporal_layers);
+}
+
+// Flags common with video loopback, with equal default values.
+DEFINE_string(codec, "VP8", "Video codec to use.");
+std::string Codec() {
+ return static_cast<std::string>(FLAGS_codec);
+}
+
+DEFINE_int32(selected_tl,
+ -1,
+ "Temporal layer to show or analyze. -1 to disable filtering.");
+int SelectedTL() {
+ return static_cast<int>(FLAGS_selected_tl);
}
DEFINE_int32(
- tl_discard_threshold,
+ duration,
0,
- "Discard TLs with id greater or equal the threshold. 0 to disable.");
-size_t TLDiscardThreshold() {
- return static_cast<size_t>(FLAGS_tl_discard_threshold);
+ "Duration of the test in seconds. If 0, rendered will be shown instead.");
+int DurationSecs() {
+ return static_cast<int>(FLAGS_duration);
}
-DEFINE_int32(min_transmit_bitrate, 400, "Min transmit bitrate incl. padding.");
-int MinTransmitBitrateKbps() {
- return FLAGS_min_transmit_bitrate;
+DEFINE_string(output_filename, "", "Target graph data filename.");
+std::string OutputFilename() {
+ return static_cast<std::string>(FLAGS_output_filename);
}
-DEFINE_string(codec, "VP8", "Video codec to use.");
-std::string Codec() {
- return static_cast<std::string>(FLAGS_codec);
+DEFINE_string(graph_title,
+ "",
+ "If empty, title will be generated automatically.");
+std::string GraphTitle() {
+ return static_cast<std::string>(FLAGS_graph_title);
}
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
@@ -124,21 +125,53 @@ int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
-DEFINE_bool(logs, false, "print logs to stderr");
+DEFINE_int32(selected_stream, 0, "ID of the stream to show or analyze.");
+int SelectedStream() {
+ return static_cast<int>(FLAGS_selected_stream);
+}
-DEFINE_string(
- output_filename,
- "",
- "Name of a target graph data file. If set, no preview will be shown.");
-std::string OutputFilename() {
- return static_cast<std::string>(FLAGS_output_filename);
+DEFINE_int32(num_spatial_layers, 1, "Number of spatial layers to use.");
+int NumSpatialLayers() {
+ return static_cast<int>(FLAGS_num_spatial_layers);
}
-DEFINE_int32(duration, 60, "Duration of the test in seconds.");
-int DurationSecs() {
- return static_cast<int>(FLAGS_duration);
+DEFINE_int32(selected_sl,
+ -1,
+ "Spatial layer to show or analyze. -1 to disable filtering.");
+int SelectedSL() {
+ return static_cast<int>(FLAGS_selected_sl);
+}
+
+DEFINE_string(stream0,
+ "",
+ "Comma separated values describing VideoStream for stream #0.");
+std::string Stream0() {
+ return static_cast<std::string>(FLAGS_stream0);
}
+DEFINE_string(stream1,
+ "",
+ "Comma separated values describing VideoStream for stream #1.");
+std::string Stream1() {
+ return static_cast<std::string>(FLAGS_stream1);
+}
+
+DEFINE_string(sl0,
+ "",
+ "Comma separated values describing SpatialLayer for layer #0.");
+std::string SL0() {
+ return static_cast<std::string>(FLAGS_sl0);
+}
+
+DEFINE_string(sl1,
+ "",
+ "Comma separated values describing SpatialLayer for layer #1.");
+std::string SL1() {
+ return static_cast<std::string>(FLAGS_sl1);
+}
+
+DEFINE_bool(logs, false, "print logs to stderr");
+
DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
DEFINE_string(
@@ -148,6 +181,28 @@ DEFINE_string(
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
+
+// Screenshare-specific flags.
+DEFINE_int32(min_transmit_bitrate, 400, "Min transmit bitrate incl. padding.");
+int MinTransmitBitrateKbps() {
+ return FLAGS_min_transmit_bitrate;
+}
+
+DEFINE_int32(slide_change_interval,
+ 10,
+ "Interval (in seconds) between simulated slide changes.");
+int SlideChangeInterval() {
+ return static_cast<int>(FLAGS_slide_change_interval);
+}
+
+DEFINE_int32(
+ scroll_duration,
+ 0,
+ "Duration (in seconds) during which a slide will be scrolled into place.");
+int ScrollDuration() {
+ return static_cast<int>(FLAGS_scroll_duration);
+}
+
} // namespace flags
void Loopback() {
@@ -167,20 +222,32 @@ void Loopback() {
{flags::Width(), flags::Height(), flags::Fps(),
flags::MinBitrateKbps() * 1000, flags::TargetBitrateKbps() * 1000,
flags::MaxBitrateKbps() * 1000, flags::Codec(),
- flags::NumTemporalLayers(), flags::MinTransmitBitrateKbps() * 1000,
- call_bitrate_config, flags::TLDiscardThreshold(),
+ flags::NumTemporalLayers(), flags::SelectedTL(),
+ flags::MinTransmitBitrateKbps() * 1000, call_bitrate_config,
flags::FLAGS_send_side_bwe},
{}, // Video specific.
{true, flags::SlideChangeInterval(), flags::ScrollDuration()},
- {"screenshare", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename()},
+ {"screenshare", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename(),
+ flags::GraphTitle()},
pipe_config,
flags::FLAGS_logs};
+ std::vector<std::string> stream_descriptors;
+ stream_descriptors.push_back(flags::Stream0());
+ stream_descriptors.push_back(flags::Stream1());
+ std::vector<std::string> SL_descriptors;
+ SL_descriptors.push_back(flags::SL0());
+ SL_descriptors.push_back(flags::SL1());
+ VideoQualityTest::FillScalabilitySettings(
+ &params, stream_descriptors, flags::SelectedStream(),
+ flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
+
VideoQualityTest test;
- if (flags::OutputFilename().empty())
- test.RunWithVideoRenderer(params);
- else
+ if (flags::DurationSecs()) {
test.RunWithAnalyzer(params);
+ } else {
+ test.RunWithVideoRenderer(params);
+ }
}
} // namespace webrtc
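Note: an illustrative invocation exercising the reorganized flags (binary name and values assumed):

    ./screenshare_loopback --duration=60 --output_filename=screenshare.graph \
        --num_temporal_layers=2 --selected_tl=1 --slide_change_interval=10

With a nonzero --duration the analyzer path runs and writes graph data; with --duration=0 the rendered output is shown instead.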
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index 5be9970583..d2964b21da 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -11,16 +11,18 @@
#include "webrtc/video/send_statistics_proxy.h"
#include <algorithm>
+#include <cmath>
#include <map>
#include "webrtc/base/checks.h"
-
#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/metrics.h"
namespace webrtc {
namespace {
+const float kEncodeTimeWeightFactor = 0.5f;
+
// Used by histograms. Values of entries should not be changed.
enum HistogramCodecType {
kVideoUnknown = 0,
@@ -30,6 +32,17 @@ enum HistogramCodecType {
kVideoMax = 64,
};
+const char* GetUmaPrefix(VideoEncoderConfig::ContentType content_type) {
+ switch (content_type) {
+ case VideoEncoderConfig::ContentType::kRealtimeVideo:
+ return "WebRTC.Video.";
+ case VideoEncoderConfig::ContentType::kScreen:
+ return "WebRTC.Video.Screenshare.";
+ }
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
HistogramCodecType PayloadNameToHistogramCodecType(
const std::string& payload_name) {
if (payload_name == "VP8") {
@@ -44,7 +57,7 @@ HistogramCodecType PayloadNameToHistogramCodecType(
}
void UpdateCodecTypeHistogram(const std::string& payload_name) {
- RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.Encoder.CodecType",
+ RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.Video.Encoder.CodecType",
PayloadNameToHistogramCodecType(payload_name), kVideoMax);
}
} // namespace
@@ -52,77 +65,114 @@ void UpdateCodecTypeHistogram(const std::string& payload_name) {
const int SendStatisticsProxy::kStatsTimeoutMs = 5000;
-SendStatisticsProxy::SendStatisticsProxy(Clock* clock,
- const VideoSendStream::Config& config)
+SendStatisticsProxy::SendStatisticsProxy(
+ Clock* clock,
+ const VideoSendStream::Config& config,
+ VideoEncoderConfig::ContentType content_type)
: clock_(clock),
config_(config),
- input_frame_rate_tracker_(100u, 10u),
- sent_frame_rate_tracker_(100u, 10u),
+ content_type_(content_type),
last_sent_frame_timestamp_(0),
- max_sent_width_per_timestamp_(0),
- max_sent_height_per_timestamp_(0) {
+      encode_time_(kEncodeTimeWeightFactor),
+ uma_container_(new UmaSamplesContainer(GetUmaPrefix(content_type_))) {
UpdateCodecTypeHistogram(config_.encoder_settings.payload_name);
}
-SendStatisticsProxy::~SendStatisticsProxy() {
+SendStatisticsProxy::~SendStatisticsProxy() {}
+
+SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
+ const char* prefix)
+ : uma_prefix_(prefix),
+ max_sent_width_per_timestamp_(0),
+ max_sent_height_per_timestamp_(0),
+ input_frame_rate_tracker_(100u, 10u),
+ sent_frame_rate_tracker_(100u, 10u) {}
+
+SendStatisticsProxy::UmaSamplesContainer::~UmaSamplesContainer() {
UpdateHistograms();
}
-void SendStatisticsProxy::UpdateHistograms() {
- int input_fps =
- static_cast<int>(input_frame_rate_tracker_.ComputeTotalRate());
- if (input_fps > 0)
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.InputFramesPerSecond", input_fps);
- int sent_fps =
- static_cast<int>(sent_frame_rate_tracker_.ComputeTotalRate());
- if (sent_fps > 0)
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.SentFramesPerSecond", sent_fps);
-
+void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms() {
const int kMinRequiredSamples = 200;
int in_width = input_width_counter_.Avg(kMinRequiredSamples);
int in_height = input_height_counter_.Avg(kMinRequiredSamples);
+ int in_fps = round(input_frame_rate_tracker_.ComputeTotalRate());
if (in_width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InputWidthInPixels", in_width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InputHeightInPixels", in_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "InputWidthInPixels",
+ in_width);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "InputHeightInPixels",
+ in_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix_ + "InputFramesPerSecond",
+ in_fps);
}
int sent_width = sent_width_counter_.Avg(kMinRequiredSamples);
int sent_height = sent_height_counter_.Avg(kMinRequiredSamples);
+ int sent_fps = round(sent_frame_rate_tracker_.ComputeTotalRate());
if (sent_width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.SentWidthInPixels", sent_width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.SentHeightInPixels", sent_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "SentWidthInPixels",
+ sent_width);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "SentHeightInPixels",
+ sent_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix_ + "SentFramesPerSecond",
+ sent_fps);
}
int encode_ms = encode_time_counter_.Avg(kMinRequiredSamples);
if (encode_ms != -1)
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.EncodeTimeInMs", encode_ms);
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(uma_prefix_ + "EncodeTimeInMs", encode_ms);
int key_frames_permille = key_frame_counter_.Permille(kMinRequiredSamples);
if (key_frames_permille != -1) {
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesSentInPermille",
- key_frames_permille);
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(uma_prefix_ + "KeyFramesSentInPermille",
+ key_frames_permille);
}
int quality_limited =
quality_limited_frame_counter_.Percent(kMinRequiredSamples);
if (quality_limited != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.QualityLimitedResolutionInPercent",
- quality_limited);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ uma_prefix_ + "QualityLimitedResolutionInPercent", quality_limited);
}
int downscales = quality_downscales_counter_.Avg(kMinRequiredSamples);
if (downscales != -1) {
- RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.QualityLimitedResolutionDownscales",
- downscales, 20);
+ RTC_HISTOGRAM_ENUMERATION_SPARSE(
+ uma_prefix_ + "QualityLimitedResolutionDownscales", downscales, 20);
}
int bw_limited = bw_limited_frame_counter_.Percent(kMinRequiredSamples);
if (bw_limited != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BandwidthLimitedResolutionInPercent",
- bw_limited);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ uma_prefix_ + "BandwidthLimitedResolutionInPercent", bw_limited);
}
int num_disabled = bw_resolutions_disabled_counter_.Avg(kMinRequiredSamples);
if (num_disabled != -1) {
- RTC_HISTOGRAM_ENUMERATION(
- "WebRTC.Video.BandwidthLimitedResolutionsDisabled", num_disabled, 10);
+ RTC_HISTOGRAM_ENUMERATION_SPARSE(
+ uma_prefix_ + "BandwidthLimitedResolutionsDisabled", num_disabled, 10);
+ }
+ int delay_ms = delay_counter_.Avg(kMinRequiredSamples);
+ if (delay_ms != -1)
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix_ + "SendSideDelayInMs",
+ delay_ms);
+
+ int max_delay_ms = max_delay_counter_.Avg(kMinRequiredSamples);
+ if (max_delay_ms != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix_ + "SendSideDelayMaxInMs",
+ max_delay_ms);
+ }
+}
+
+void SendStatisticsProxy::SetContentType(
+ VideoEncoderConfig::ContentType content_type) {
+ rtc::CritScope lock(&crit_);
+ if (content_type_ != content_type) {
+ uma_container_.reset(new UmaSamplesContainer(GetUmaPrefix(content_type)));
+ content_type_ = content_type;
}
}
+void SendStatisticsProxy::OnEncoderImplementationName(
+ const char* implementation_name) {
+ rtc::CritScope lock(&crit_);
+ stats_.encoder_implementation_name = implementation_name;
+}
+
void SendStatisticsProxy::OnOutgoingRate(uint32_t framerate, uint32_t bitrate) {
rtc::CritScope lock(&crit_);
stats_.encode_frame_rate = framerate;
@@ -132,8 +182,6 @@ void SendStatisticsProxy::OnOutgoingRate(uint32_t framerate, uint32_t bitrate) {
void SendStatisticsProxy::CpuOveruseMetricsUpdated(
const CpuOveruseMetrics& metrics) {
rtc::CritScope lock(&crit_);
- // TODO(asapersson): Change to use OnEncodedFrame() for avg_encode_time_ms.
- stats_.avg_encode_time_ms = metrics.avg_encode_time_ms;
stats_.encode_usage_percent = metrics.encode_usage_percent;
}
@@ -146,7 +194,7 @@ VideoSendStream::Stats SendStatisticsProxy::GetStats() {
rtc::CritScope lock(&crit_);
PurgeOldStats();
stats_.input_frame_rate =
- static_cast<int>(input_frame_rate_tracker_.ComputeRate());
+ round(uma_container_->input_frame_rate_tracker_.ComputeRate());
return stats_;
}
@@ -219,23 +267,28 @@ void SendStatisticsProxy::OnSendEncodedImage(
stats->height = encoded_image._encodedHeight;
update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
- key_frame_counter_.Add(encoded_image._frameType == kVideoFrameKey);
+ uma_container_->key_frame_counter_.Add(encoded_image._frameType ==
+ kVideoFrameKey);
+
+ stats_.bw_limited_resolution =
+ encoded_image.adapt_reason_.quality_resolution_downscales > 0 ||
+ encoded_image.adapt_reason_.bw_resolutions_disabled > 0;
if (encoded_image.adapt_reason_.quality_resolution_downscales != -1) {
bool downscaled =
encoded_image.adapt_reason_.quality_resolution_downscales > 0;
- quality_limited_frame_counter_.Add(downscaled);
+ uma_container_->quality_limited_frame_counter_.Add(downscaled);
if (downscaled) {
- quality_downscales_counter_.Add(
+ uma_container_->quality_downscales_counter_.Add(
encoded_image.adapt_reason_.quality_resolution_downscales);
}
}
if (encoded_image.adapt_reason_.bw_resolutions_disabled != -1) {
bool bw_limited = encoded_image.adapt_reason_.bw_resolutions_disabled > 0;
- bw_limited_frame_counter_.Add(bw_limited);
+ uma_container_->bw_limited_frame_counter_.Add(bw_limited);
if (bw_limited) {
- bw_resolutions_disabled_counter_.Add(
- encoded_image.adapt_reason_.bw_resolutions_disabled);
+ uma_container_->bw_resolutions_disabled_counter_.Add(
+ encoded_image.adapt_reason_.bw_resolutions_disabled);
}
}
@@ -244,31 +297,35 @@ void SendStatisticsProxy::OnSendEncodedImage(
// are encoded before the next start.
if (last_sent_frame_timestamp_ > 0 &&
encoded_image._timeStamp != last_sent_frame_timestamp_) {
- sent_frame_rate_tracker_.AddSamples(1);
- sent_width_counter_.Add(max_sent_width_per_timestamp_);
- sent_height_counter_.Add(max_sent_height_per_timestamp_);
- max_sent_width_per_timestamp_ = 0;
- max_sent_height_per_timestamp_ = 0;
+ uma_container_->sent_frame_rate_tracker_.AddSamples(1);
+ uma_container_->sent_width_counter_.Add(
+ uma_container_->max_sent_width_per_timestamp_);
+ uma_container_->sent_height_counter_.Add(
+ uma_container_->max_sent_height_per_timestamp_);
+ uma_container_->max_sent_width_per_timestamp_ = 0;
+ uma_container_->max_sent_height_per_timestamp_ = 0;
}
last_sent_frame_timestamp_ = encoded_image._timeStamp;
- max_sent_width_per_timestamp_ =
- std::max(max_sent_width_per_timestamp_,
+ uma_container_->max_sent_width_per_timestamp_ =
+ std::max(uma_container_->max_sent_width_per_timestamp_,
static_cast<int>(encoded_image._encodedWidth));
- max_sent_height_per_timestamp_ =
- std::max(max_sent_height_per_timestamp_,
+ uma_container_->max_sent_height_per_timestamp_ =
+ std::max(uma_container_->max_sent_height_per_timestamp_,
static_cast<int>(encoded_image._encodedHeight));
}
void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
rtc::CritScope lock(&crit_);
- input_frame_rate_tracker_.AddSamples(1);
- input_width_counter_.Add(width);
- input_height_counter_.Add(height);
+ uma_container_->input_frame_rate_tracker_.AddSamples(1);
+ uma_container_->input_width_counter_.Add(width);
+ uma_container_->input_height_counter_.Add(height);
}
void SendStatisticsProxy::OnEncodedFrame(int encode_time_ms) {
rtc::CritScope lock(&crit_);
- encode_time_counter_.Add(encode_time_ms);
+ uma_container_->encode_time_counter_.Add(encode_time_ms);
+ encode_time_.Apply(1.0f, encode_time_ms);
+ stats_.avg_encode_time_ms = round(encode_time_.filtered());
}
void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
@@ -337,6 +394,9 @@ void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms,
return;
stats->avg_delay_ms = avg_delay_ms;
stats->max_delay_ms = max_delay_ms;
+
+ uma_container_->delay_counter_.Add(avg_delay_ms);
+ uma_container_->max_delay_counter_.Add(max_delay_ms);
}
void SendStatisticsProxy::SampleCounter::Add(int sample) {
@@ -372,5 +432,4 @@ int SendStatisticsProxy::BoolSampleCounter::Fraction(
return -1;
return static_cast<int>((sum * multiplier / num_samples) + 0.5f);
}
-
} // namespace webrtc
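Note: avg_encode_time_ms now comes from an exponential filter seeded with kEncodeTimeWeightFactor instead of the overuse detector's average. A self-contained sketch of the update rule (assumed to mirror rtc::ExpFilter; with weight 0.5 and exp = 1.0, each sample pulls the average halfway toward itself):

    #include <cmath>

    class ExpFilterSketch {
     public:
      explicit ExpFilterSketch(float alpha) : alpha_(alpha), filtered_(-1.0f) {}
      float Apply(float exp, float sample) {
        if (filtered_ < 0.0f)
          filtered_ = sample;  // First sample initializes the filter.
        else
          filtered_ = std::pow(alpha_, exp) * filtered_ +
                      (1.0f - std::pow(alpha_, exp)) * sample;
        return filtered_;
      }
      float filtered() const { return filtered_; }

     private:
      const float alpha_;
      float filtered_;
    };

Feeding encode times of 10 ms and then 20 ms yields 10, then 15: the smoothing keeps per-frame spikes out of the reported average.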
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index 26ea09c4c4..7f6df06ad8 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -11,18 +11,20 @@
#ifndef WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
#define WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
+#include <map>
#include <string>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/exp_filter.h"
#include "webrtc/base/ratetracker.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/video_engine/overuse_frame_detector.h"
-#include "webrtc/video_engine/vie_encoder.h"
+#include "webrtc/video/overuse_frame_detector.h"
+#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
@@ -38,7 +40,9 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
public:
static const int kStatsTimeoutMs;
- SendStatisticsProxy(Clock* clock, const VideoSendStream::Config& config);
+ SendStatisticsProxy(Clock* clock,
+ const VideoSendStream::Config& config,
+ VideoEncoderConfig::ContentType content_type);
virtual ~SendStatisticsProxy();
VideoSendStream::Stats GetStats();
@@ -54,10 +58,15 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
// From VideoEncoderRateObserver.
void OnSetRates(uint32_t bitrate_bps, int framerate) override;
+ void OnEncoderImplementationName(const char* implementation_name);
void OnOutgoingRate(uint32_t framerate, uint32_t bitrate);
void OnSuspendChange(bool is_suspended);
void OnInactiveSsrc(uint32_t ssrc);
+  // Used to indicate a change in content type, which may require a change in
+  // how stats are collected.
+ void SetContentType(VideoEncoderConfig::ContentType content_type);
+
protected:
// From CpuOveruseMetricsObserver.
void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override;
@@ -112,36 +121,52 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
int num_samples;
};
struct StatsUpdateTimes {
- StatsUpdateTimes() : resolution_update_ms(0) {}
+ StatsUpdateTimes() : resolution_update_ms(0), bitrate_update_ms(0) {}
int64_t resolution_update_ms;
int64_t bitrate_update_ms;
};
void PurgeOldStats() EXCLUSIVE_LOCKS_REQUIRED(crit_);
VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc)
EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void UpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_);
Clock* const clock_;
const VideoSendStream::Config config_;
mutable rtc::CriticalSection crit_;
+ VideoEncoderConfig::ContentType content_type_ GUARDED_BY(crit_);
VideoSendStream::Stats stats_ GUARDED_BY(crit_);
- rtc::RateTracker input_frame_rate_tracker_ GUARDED_BY(crit_);
- rtc::RateTracker sent_frame_rate_tracker_ GUARDED_BY(crit_);
uint32_t last_sent_frame_timestamp_ GUARDED_BY(crit_);
std::map<uint32_t, StatsUpdateTimes> update_times_ GUARDED_BY(crit_);
+ rtc::ExpFilter encode_time_ GUARDED_BY(crit_);
+
+ // Contains stats used for UMA histograms. These stats will be reset if
+ // content type changes between real-time video and screenshare, since these
+ // will be reported separately.
+ struct UmaSamplesContainer {
+ explicit UmaSamplesContainer(const char* prefix);
+ ~UmaSamplesContainer();
+
+ void UpdateHistograms();
+
+ const std::string uma_prefix_;
+ int max_sent_width_per_timestamp_;
+ int max_sent_height_per_timestamp_;
+ SampleCounter input_width_counter_;
+ SampleCounter input_height_counter_;
+ SampleCounter sent_width_counter_;
+ SampleCounter sent_height_counter_;
+ SampleCounter encode_time_counter_;
+ BoolSampleCounter key_frame_counter_;
+ BoolSampleCounter quality_limited_frame_counter_;
+ SampleCounter quality_downscales_counter_;
+ BoolSampleCounter bw_limited_frame_counter_;
+ SampleCounter bw_resolutions_disabled_counter_;
+ SampleCounter delay_counter_;
+ SampleCounter max_delay_counter_;
+ rtc::RateTracker input_frame_rate_tracker_;
+ rtc::RateTracker sent_frame_rate_tracker_;
+ };
- int max_sent_width_per_timestamp_ GUARDED_BY(crit_);
- int max_sent_height_per_timestamp_ GUARDED_BY(crit_);
- SampleCounter input_width_counter_ GUARDED_BY(crit_);
- SampleCounter input_height_counter_ GUARDED_BY(crit_);
- SampleCounter sent_width_counter_ GUARDED_BY(crit_);
- SampleCounter sent_height_counter_ GUARDED_BY(crit_);
- SampleCounter encode_time_counter_ GUARDED_BY(crit_);
- BoolSampleCounter key_frame_counter_ GUARDED_BY(crit_);
- BoolSampleCounter quality_limited_frame_counter_ GUARDED_BY(crit_);
- SampleCounter quality_downscales_counter_ GUARDED_BY(crit_);
- BoolSampleCounter bw_limited_frame_counter_ GUARDED_BY(crit_);
- SampleCounter bw_resolutions_disabled_counter_ GUARDED_BY(crit_);
+ rtc::scoped_ptr<UmaSamplesContainer> uma_container_ GUARDED_BY(crit_);
};
} // namespace webrtc
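Note: the counters moved into UmaSamplesContainer share one contract: accumulate samples and report a rounded average only once kMinRequiredSamples have arrived, otherwise -1 so no histogram is recorded. An illustrative standalone version (field names hypothetical):

    struct SampleCounterSketch {
      int sum = 0;
      int num = 0;
      void Add(int sample) { sum += sample; num += 1; }
      // Rounded mean, or -1 until enough samples have been gathered.
      int Avg(int min_required) const {
        return num < min_required ? -1 : (sum + num / 2) / num;
      }
    };

Resetting the whole container in SetContentType() restarts every counter at once, so real-time and screenshare samples never mix within one histogram.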
diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc
index 8e6b7bcab3..fc1f3fdbde 100644
--- a/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/webrtc/video/send_statistics_proxy_unittest.cc
@@ -16,6 +16,7 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/histogram.h"
namespace webrtc {
@@ -28,8 +29,9 @@ class SendStatisticsProxyTest : public ::testing::Test {
protected:
virtual void SetUp() {
- statistics_proxy_.reset(
- new SendStatisticsProxy(&fake_clock_, GetTestConfig()));
+ statistics_proxy_.reset(new SendStatisticsProxy(
+ &fake_clock_, GetTestConfig(),
+ VideoEncoderConfig::ContentType::kRealtimeVideo));
expected_ = VideoSendStream::Stats();
}
@@ -287,6 +289,33 @@ TEST_F(SendStatisticsProxyTest, SendSideDelay) {
ExpectEqual(expected_, stats);
}
+TEST_F(SendStatisticsProxyTest, OnEncodedFrame) {
+ const int kEncodeTimeMs = 11;
+ statistics_proxy_->OnEncodedFrame(kEncodeTimeMs);
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ EXPECT_EQ(kEncodeTimeMs, stats.avg_encode_time_ms);
+}
+
+TEST_F(SendStatisticsProxyTest, SwitchContentTypeUpdatesHistograms) {
+ test::ClearHistograms();
+ const int kMinRequiredSamples = 200;
+ const int kWidth = 640;
+ const int kHeight = 480;
+
+ for (int i = 0; i < kMinRequiredSamples; ++i)
+ statistics_proxy_->OnIncomingFrame(kWidth, kHeight);
+
+  // No switch; stats should not be updated.
+ statistics_proxy_->SetContentType(
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+ EXPECT_EQ(0, test::NumHistogramSamples("WebRTC.Video.InputWidthInPixels"));
+
+ // Switch to screenshare, real-time stats should be updated.
+ statistics_proxy_->SetContentType(VideoEncoderConfig::ContentType::kScreen);
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputWidthInPixels"));
+}
+
TEST_F(SendStatisticsProxyTest, NoSubstreams) {
uint32_t excluded_ssrc =
std::max(
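Note: the stream_synchronization.cc diff below carries the A/V sync math. Its ComputeRelativeDelay() boils down to (worked with hypothetical numbers):

    relative_delay_ms = (video_recv_ms - audio_recv_ms)
                      - (video_capture_ms - audio_capture_ms)
                      = (200 - 150) - (30 - 30) = 50

Frames captured at the same instant but with video arriving 50 ms later give a +50 ms relative delay, i.e. video is behind audio. ComputeDelays() then low-pass filters this diff (avg = (3 * avg_prev + diff) / 4), halves it, and clamps each step to +/-kMaxChangeMs before splitting it between extra audio and video delay.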
diff --git a/webrtc/video/stream_synchronization.cc b/webrtc/video/stream_synchronization.cc
new file mode 100644
index 0000000000..cb37d80ef5
--- /dev/null
+++ b/webrtc/video/stream_synchronization.cc
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/stream_synchronization.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+
+#include <algorithm>
+
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+static const int kMaxChangeMs = 80;
+static const int kMaxDeltaDelayMs = 10000;
+static const int kFilterLength = 4;
+// Minimum difference between audio and video to warrant a change.
+static const int kMinDeltaMs = 30;
+
+struct ViESyncDelay {
+ ViESyncDelay() {
+ extra_video_delay_ms = 0;
+ last_video_delay_ms = 0;
+ extra_audio_delay_ms = 0;
+ last_audio_delay_ms = 0;
+ network_delay = 120;
+ }
+
+ int extra_video_delay_ms;
+ int last_video_delay_ms;
+ int extra_audio_delay_ms;
+ int last_audio_delay_ms;
+ int network_delay;
+};
+
+StreamSynchronization::StreamSynchronization(uint32_t video_primary_ssrc,
+ int audio_channel_id)
+ : channel_delay_(new ViESyncDelay),
+ video_primary_ssrc_(video_primary_ssrc),
+ audio_channel_id_(audio_channel_id),
+ base_target_delay_ms_(0),
+ avg_diff_ms_(0) {
+}
+
+StreamSynchronization::~StreamSynchronization() {
+ delete channel_delay_;
+}
+
+bool StreamSynchronization::ComputeRelativeDelay(
+ const Measurements& audio_measurement,
+ const Measurements& video_measurement,
+ int* relative_delay_ms) {
+ assert(relative_delay_ms);
+ if (audio_measurement.rtcp.size() < 2 || video_measurement.rtcp.size() < 2) {
+ // We need two RTCP SR reports per stream to do synchronization.
+ return false;
+ }
+ int64_t audio_last_capture_time_ms;
+ if (!RtpToNtpMs(audio_measurement.latest_timestamp,
+ audio_measurement.rtcp,
+ &audio_last_capture_time_ms)) {
+ return false;
+ }
+ int64_t video_last_capture_time_ms;
+ if (!RtpToNtpMs(video_measurement.latest_timestamp,
+ video_measurement.rtcp,
+ &video_last_capture_time_ms)) {
+ return false;
+ }
+ if (video_last_capture_time_ms < 0) {
+ return false;
+ }
+ // Positive diff means that video_measurement is behind audio_measurement.
+ *relative_delay_ms = video_measurement.latest_receive_time_ms -
+ audio_measurement.latest_receive_time_ms -
+ (video_last_capture_time_ms - audio_last_capture_time_ms);
+ if (*relative_delay_ms > kMaxDeltaDelayMs ||
+ *relative_delay_ms < -kMaxDeltaDelayMs) {
+ return false;
+ }
+ return true;
+}
+
+bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
+ int current_audio_delay_ms,
+ int* total_audio_delay_target_ms,
+ int* total_video_delay_target_ms) {
+ assert(total_audio_delay_target_ms && total_video_delay_target_ms);
+
+ int current_video_delay_ms = *total_video_delay_target_ms;
+ LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
+ << ", network delay diff: " << channel_delay_->network_delay
+ << " current diff: " << relative_delay_ms
+ << " for channel " << audio_channel_id_;
+ // Calculate the difference between the lowest possible video delay and
+ // the current audio delay.
+ int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
+ relative_delay_ms;
+
+ avg_diff_ms_ = ((kFilterLength - 1) * avg_diff_ms_ +
+ current_diff_ms) / kFilterLength;
+ if (abs(avg_diff_ms_) < kMinDeltaMs) {
+ // Don't adjust if the diff is within our margin.
+ return false;
+ }
+
+ // Make sure we don't move too fast.
+ int diff_ms = avg_diff_ms_ / 2;
+ diff_ms = std::min(diff_ms, kMaxChangeMs);
+ diff_ms = std::max(diff_ms, -kMaxChangeMs);
+
+ // Reset the average after a move to prevent overshooting reaction.
+ avg_diff_ms_ = 0;
+
+ if (diff_ms > 0) {
+ // The minimum video delay is longer than the current audio delay.
+ // We need to decrease extra video delay, or add extra audio delay.
+ if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+ // We have extra delay added to ViE. Reduce this delay before adding
+ // extra delay to VoE.
+ channel_delay_->extra_video_delay_ms -= diff_ms;
+ channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+    } else {  // channel_delay_->extra_video_delay_ms <= base_target_delay_ms_
+ // We have no extra video delay to remove, increase the audio delay.
+ channel_delay_->extra_audio_delay_ms += diff_ms;
+ channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+ }
+  } else {  // diff_ms <= 0
+ // The video delay is lower than the current audio delay.
+ // We need to decrease extra audio delay, or add extra video delay.
+ if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+ // We have extra delay in VoiceEngine.
+ // Start with decreasing the voice delay.
+ // Note: diff_ms is negative; add the negative difference.
+ channel_delay_->extra_audio_delay_ms += diff_ms;
+ channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+    } else {  // channel_delay_->extra_audio_delay_ms <= base_target_delay_ms_
+ // We have no extra delay in VoiceEngine, increase the video delay.
+ // Note: diff_ms is negative; subtract the negative difference.
+ channel_delay_->extra_video_delay_ms -= diff_ms; // X - (-Y) = X + Y.
+ channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+ }
+ }
+
+ // Make sure that video is never below our target.
+ channel_delay_->extra_video_delay_ms = std::max(
+ channel_delay_->extra_video_delay_ms, base_target_delay_ms_);
+
+ int new_video_delay_ms;
+ if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+ new_video_delay_ms = channel_delay_->extra_video_delay_ms;
+ } else {
+    // No change to the extra video delay. We are changing audio and we only
+    // allow changing one at a time.
+ new_video_delay_ms = channel_delay_->last_video_delay_ms;
+ }
+
+ // Make sure that we don't go below the extra video delay.
+ new_video_delay_ms = std::max(
+ new_video_delay_ms, channel_delay_->extra_video_delay_ms);
+
+ // Verify we don't go above the maximum allowed video delay.
+ new_video_delay_ms =
+ std::min(new_video_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
+
+ int new_audio_delay_ms;
+ if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+ new_audio_delay_ms = channel_delay_->extra_audio_delay_ms;
+ } else {
+    // No change to the audio delay. We are changing video and we only
+    // allow changing one at a time.
+ new_audio_delay_ms = channel_delay_->last_audio_delay_ms;
+ }
+
+ // Make sure that we don't go below the extra audio delay.
+ new_audio_delay_ms = std::max(
+ new_audio_delay_ms, channel_delay_->extra_audio_delay_ms);
+
+ // Verify we don't go above the maximum allowed audio delay.
+ new_audio_delay_ms =
+ std::min(new_audio_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
+
+ // Remember our last audio and video delays.
+ channel_delay_->last_video_delay_ms = new_video_delay_ms;
+ channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
+
+ LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
+ << " for video primary SSRC " << video_primary_ssrc_
+ << " and audio delay " << channel_delay_->extra_audio_delay_ms
+ << " for audio channel " << audio_channel_id_;
+
+ // Return values.
+ *total_video_delay_target_ms = new_video_delay_ms;
+ *total_audio_delay_target_ms = new_audio_delay_ms;
+ return true;
+}
+
+void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) {
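+  // All stored delays are adjusted by the change relative to the previous
+  // base target, so repeated calls with the same value are no-ops.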
+ // Initial extra delay for audio (accounting for existing extra delay).
+ channel_delay_->extra_audio_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+ channel_delay_->last_audio_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+
+ // The video delay is compared to the last value (and how much we can update
+ // is limited by that as well).
+ channel_delay_->last_video_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+
+ channel_delay_->extra_video_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+
+  // Both streams have now been adjusted to include the new base target delay.
+ base_target_delay_ms_ = target_delay_ms;
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/stream_synchronization.h b/webrtc/video/stream_synchronization.h
new file mode 100644
index 0000000000..cb7c110f44
--- /dev/null
+++ b/webrtc/video/stream_synchronization.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_STREAM_SYNCHRONIZATION_H_
+#define WEBRTC_VIDEO_STREAM_SYNCHRONIZATION_H_
+
+#include <list>
+
+#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct ViESyncDelay;
+
+class StreamSynchronization {
+ public:
+ struct Measurements {
+ Measurements() : rtcp(), latest_receive_time_ms(0), latest_timestamp(0) {}
+ RtcpList rtcp;
+ int64_t latest_receive_time_ms;
+ uint32_t latest_timestamp;
+ };
+
+ StreamSynchronization(uint32_t video_primary_ssrc, int audio_channel_id);
+ ~StreamSynchronization();
+
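+  // Computes the audio and video delay targets needed to get the streams in
+  // sync. |relative_delay_ms| is the video delay relative to audio, as
+  // returned by ComputeRelativeDelay(). |total_video_delay_target_ms| is an
+  // in/out parameter: the current minimum video delay on input, the new
+  // target on output. Returns true if a new target was computed.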
+ bool ComputeDelays(int relative_delay_ms,
+ int current_audio_delay_ms,
+                     int* total_audio_delay_target_ms,
+ int* total_video_delay_target_ms);
+
+  // On success, |relative_delay_ms| contains the number of milliseconds by
+  // which video is rendered later than audio. If audio is played back later
+  // than video, |relative_delay_ms| will be negative.
+ static bool ComputeRelativeDelay(const Measurements& audio_measurement,
+ const Measurements& video_measurement,
+ int* relative_delay_ms);
+  // Sets the target buffering delay: all audio and video will be delayed by
+  // at least |target_delay_ms|.
+ void SetTargetBufferingDelay(int target_delay_ms);
+
+ private:
+ ViESyncDelay* channel_delay_;
+ const uint32_t video_primary_ssrc_;
+ const int audio_channel_id_;
+ int base_target_delay_ms_;
+ int avg_diff_ms_;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_STREAM_SYNCHRONIZATION_H_
diff --git a/webrtc/video/stream_synchronization_unittest.cc b/webrtc/video/stream_synchronization_unittest.cc
new file mode 100644
index 0000000000..2834dfe1b2
--- /dev/null
+++ b/webrtc/video/stream_synchronization_unittest.cc
@@ -0,0 +1,563 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+
+#include <algorithm>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/video/stream_synchronization.h"
+
+namespace webrtc {
+
+// These correspond to the same constants defined in stream_synchronization.cc.
+enum { kMaxVideoDiffMs = 80 };
+enum { kMaxAudioDiffMs = 80 };
+enum { kMaxDelay = 1500 };
+
+// Test constants.
+enum { kDefaultAudioFrequency = 8000 };
+enum { kDefaultVideoFrequency = 90000 };
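+// Fractional NTP units per millisecond: 2^32 / 1000.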
+const double kNtpFracPerMs = 4.294967296E6;
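+// kFilterLength * 2, since ComputeDelays() only moves half of the averaged
+// diff per call.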
+static const int kSmoothingFilter = 4 * 2;
+
+class Time {
+ public:
+ explicit Time(int64_t offset)
+ : kNtpJan1970(2208988800UL),
+ time_now_ms_(offset) {}
+
+ RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const {
+ RtcpMeasurement rtcp;
+ NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac);
+ rtcp.rtp_timestamp = NowRtp(frequency, offset);
+ return rtcp;
+ }
+
+ void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const {
+ *ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
+ int64_t remainder_ms = time_now_ms_ % 1000;
+ *ntp_frac = static_cast<uint32_t>(
+ static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
+ }
+
+ uint32_t NowRtp(int frequency, uint32_t offset) const {
+ return frequency * time_now_ms_ / 1000 + offset;
+ }
+
+ void IncreaseTimeMs(int64_t inc) {
+ time_now_ms_ += inc;
+ }
+
+ int64_t time_now_ms() const {
+ return time_now_ms_;
+ }
+
+ private:
+ // January 1970, in NTP seconds.
+ const uint32_t kNtpJan1970;
+ int64_t time_now_ms_;
+};
+
+class StreamSynchronizationTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ sync_ = new StreamSynchronization(0, 0);
+ send_time_ = new Time(kSendTimeOffsetMs);
+ receive_time_ = new Time(kReceiveTimeOffsetMs);
+ audio_clock_drift_ = 1.0;
+ video_clock_drift_ = 1.0;
+ }
+
+ virtual void TearDown() {
+ delete sync_;
+ delete send_time_;
+ delete receive_time_;
+ }
+
+ // Generates the necessary RTCP measurements and RTP timestamps and computes
+ // the audio and video delays needed to get the two streams in sync.
+  // |audio_delay_ms| and |video_delay_ms| are the number of milliseconds
+  // after capture at which the frames are rendered.
+  // |current_audio_delay_ms| is the number of milliseconds by which audio is
+  // currently being delayed by the receiver.
+ bool DelayedStreams(int audio_delay_ms,
+ int video_delay_ms,
+ int current_audio_delay_ms,
+ int* extra_audio_delay_ms,
+ int* total_video_delay_ms) {
+ int audio_frequency = static_cast<int>(kDefaultAudioFrequency *
+ audio_clock_drift_ + 0.5);
+ int audio_offset = 0;
+ int video_frequency = static_cast<int>(kDefaultVideoFrequency *
+ video_clock_drift_ + 0.5);
+ int video_offset = 0;
+ StreamSynchronization::Measurements audio;
+ StreamSynchronization::Measurements video;
+ // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
+ audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
+ audio_offset));
+ send_time_->IncreaseTimeMs(100);
+ receive_time_->IncreaseTimeMs(100);
+ video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
+ video_offset));
+ send_time_->IncreaseTimeMs(900);
+ receive_time_->IncreaseTimeMs(900);
+ audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
+ audio_offset));
+ send_time_->IncreaseTimeMs(100);
+ receive_time_->IncreaseTimeMs(100);
+ video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
+ video_offset));
+ send_time_->IncreaseTimeMs(900);
+ receive_time_->IncreaseTimeMs(900);
+
+ // Capture an audio and a video frame at the same time.
+ audio.latest_timestamp = send_time_->NowRtp(audio_frequency,
+ audio_offset);
+ video.latest_timestamp = send_time_->NowRtp(video_frequency,
+ video_offset);
+
+ if (audio_delay_ms > video_delay_ms) {
+ // Audio later than video.
+ receive_time_->IncreaseTimeMs(video_delay_ms);
+ video.latest_receive_time_ms = receive_time_->time_now_ms();
+ receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms);
+ audio.latest_receive_time_ms = receive_time_->time_now_ms();
+ } else {
+ // Video later than audio.
+ receive_time_->IncreaseTimeMs(audio_delay_ms);
+ audio.latest_receive_time_ms = receive_time_->time_now_ms();
+ receive_time_->IncreaseTimeMs(video_delay_ms - audio_delay_ms);
+ video.latest_receive_time_ms = receive_time_->time_now_ms();
+ }
+ int relative_delay_ms;
+ StreamSynchronization::ComputeRelativeDelay(audio, video,
+ &relative_delay_ms);
+ EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms);
+ return sync_->ComputeDelays(relative_delay_ms,
+ current_audio_delay_ms,
+ extra_audio_delay_ms,
+ total_video_delay_ms);
+ }
+
+ // Simulate audio playback 300 ms after capture and video rendering 100 ms
+ // after capture. Verify that the correct extra delays are calculated for
+ // audio and video, and that they change correctly when we simulate that
+ // NetEQ or the VCM adds more delay to the streams.
+ // TODO(holmer): This is currently wrong! We should simply change
+ // audio_delay_ms or video_delay_ms since those now include VCM and NetEQ
+ // delays.
+ void BothDelayedAudioLaterTest(int base_target_delay) {
+ int current_audio_delay_ms = base_target_delay;
+ int audio_delay_ms = base_target_delay + 300;
+ int video_delay_ms = base_target_delay + 100;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay;
+ int filtered_move = (audio_delay_ms - video_delay_ms) / kSmoothingFilter;
+ const int kNeteqDelayIncrease = 50;
+ const int kNeteqDelayDecrease = 10;
+
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + 2 * filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + 3 * filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+
+ // Simulate that NetEQ introduces some audio delay.
+ current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ filtered_move = 3 * filtered_move +
+ (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) /
+ kSmoothingFilter;
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+
+ // Simulate that NetEQ reduces its delay.
+ current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+
+ filtered_move = filtered_move +
+ (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) /
+ kSmoothingFilter;
+
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ }
+
+ void BothDelayedVideoLaterTest(int base_target_delay) {
+ int current_audio_delay_ms = base_target_delay;
+ int audio_delay_ms = base_target_delay + 100;
+ int video_delay_ms = base_target_delay + 300;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay;
+
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than this in 1 second.
+ EXPECT_GE(base_target_delay + kMaxAudioDiffMs, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ int current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+    // The audio delay is not allowed to change more than half of the
+    // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+    // The audio delay is not allowed to change more than half of the
+    // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason reduced the delay.
+ current_audio_delay_ms = base_target_delay + 10;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+    // Since we can only ask NetEQ for a certain amount of extra delay, and
+    // we only measure the total NetEQ delay, we will ask for additional delay
+    // here to try to stay in sync.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason significantly increased the delay.
+ current_audio_delay_ms = base_target_delay + 350;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+    // The audio delay is not allowed to change more than half of the
+    // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ }
+
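+  // Expected audio delay change per iteration, mirroring the smoothing and
+  // clamping done in StreamSynchronization::ComputeDelays().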
+ int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
+ return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
+ static_cast<int>(kMaxAudioDiffMs));
+ }
+
+ int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
+ return std::max((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
+ -kMaxAudioDiffMs);
+ }
+
+ enum { kSendTimeOffsetMs = 98765 };
+ enum { kReceiveTimeOffsetMs = 43210 };
+
+ StreamSynchronization* sync_;
+ Time* send_time_; // The simulated clock at the sender.
+ Time* receive_time_; // The simulated clock at the receiver.
+ double audio_clock_drift_;
+ double video_clock_drift_;
+};
+
+TEST_F(StreamSynchronizationTest, NoDelay) {
+  int current_audio_delay_ms = 0;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_FALSE(DelayedStreams(0, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ EXPECT_EQ(0, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, VideoDelay) {
+  int current_audio_delay_ms = 0;
+ int delay_ms = 200;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ // The video delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(delay_ms / kSmoothingFilter, total_video_delay_ms);
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ // Simulate 0 minimum delay in the VCM.
+ total_video_delay_ms = 0;
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ // The video delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(2 * delay_ms / kSmoothingFilter, total_video_delay_ms);
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ // Simulate 0 minimum delay in the VCM.
+ total_video_delay_ms = 0;
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ EXPECT_EQ(3 * delay_ms / kSmoothingFilter, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, AudioDelay) {
+ int current_audio_delay_ms = 0;
+ int delay_ms = 200;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(delay_ms / kSmoothingFilter, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ int current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than half of the required
+  // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than half of the required
+  // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason reduced the delay.
+ current_audio_delay_ms = 10;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+  // Since we can only ask NetEQ for a certain amount of extra delay, and
+  // we only measure the total NetEQ delay, we will ask for additional delay
+  // here to try to stay in sync.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason significantly increased the delay.
+ current_audio_delay_ms = 350;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than half of the required
+  // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterAudioClockDrift) {
+ audio_clock_drift_ = 1.05;
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterVideoClockDrift) {
+ video_clock_drift_ = 1.05;
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDrift) {
+ audio_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDrift) {
+ video_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BaseDelay) {
+ int base_target_delay_ms = 2000;
+ int current_audio_delay_ms = 2000;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  // We are in sync; don't change anything.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ // Triggering another call with the same values. Delay should not be modified.
+ base_target_delay_ms = 2000;
+ current_audio_delay_ms = base_target_delay_ms;
+ total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  // We are in sync; don't change anything.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ // Changing delay value - intended to test this module only. In practice it
+ // would take VoE time to adapt.
+ base_target_delay_ms = 5000;
+ current_audio_delay_ms = base_target_delay_ms;
+ total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  // We are in sync; don't change anything.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ audio_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ video_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest,
+ BothDelayedVideoLaterAudioClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ audio_clock_drift_ = 1.05;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest,
+ BothDelayedVideoLaterVideoClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ video_clock_drift_ = 1.05;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
index 42bc65f05f..1c5f299291 100644
--- a/webrtc/video/video_capture_input.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -13,18 +13,17 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/overuse_frame_detector.h"
#include "webrtc/video/send_statistics_proxy.h"
-#include "webrtc/video_engine/overuse_frame_detector.h"
-#include "webrtc/video_engine/vie_encoder.h"
+#include "webrtc/video/vie_encoder.h"
namespace webrtc {
@@ -42,10 +41,8 @@ VideoCaptureInput::VideoCaptureInput(
local_renderer_(local_renderer),
stats_proxy_(stats_proxy),
incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- encoder_thread_(ThreadWrapper::CreateThread(EncoderThreadFunction,
- this,
- "EncoderThread")),
- capture_event_(EventWrapper::Create()),
+ encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
+ capture_event_(false, false),
stop_(0),
last_captured_timestamp_(0),
delta_ntp_internal_ms_(
@@ -56,8 +53,8 @@ VideoCaptureInput::VideoCaptureInput(
overuse_observer,
stats_proxy)),
encoding_time_observer_(encoding_time_observer) {
- encoder_thread_->Start();
- encoder_thread_->SetPriority(kHighPriority);
+ encoder_thread_.Start();
+ encoder_thread_.SetPriority(rtc::kHighPriority);
module_process_thread_->RegisterModule(overuse_detector_.get());
}
@@ -66,8 +63,8 @@ VideoCaptureInput::~VideoCaptureInput() {
// Stop the thread.
rtc::AtomicOps::ReleaseStore(&stop_, 1);
- capture_event_->Set();
- encoder_thread_->Stop();
+ capture_event_.Set();
+ encoder_thread_.Stop();
}
void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
@@ -118,7 +115,7 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
- capture_event_->Set();
+ capture_event_.Set();
}
bool VideoCaptureInput::EncoderThreadFunction(void* obj) {
@@ -128,7 +125,7 @@ bool VideoCaptureInput::EncoderThreadFunction(void* obj) {
bool VideoCaptureInput::EncoderProcess() {
static const int kThreadWaitTimeMs = 100;
int64_t capture_time = -1;
- if (capture_event_->Wait(kThreadWaitTimeMs) == kEventSignaled) {
+ if (capture_event_.Wait(kThreadWaitTimeMs)) {
if (rtc::AtomicOps::AcquireLoad(&stop_))
return false;
@@ -150,7 +147,6 @@ bool VideoCaptureInput::EncoderProcess() {
if (encode_start_time != -1) {
int encode_time_ms = static_cast<int>(
Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
- overuse_detector_->FrameEncoded(encode_time_ms);
stats_proxy_->OnEncodedFrame(encode_time_ms);
if (encoding_time_observer_) {
encoding_time_observer_->OnReportEncodedTime(
diff --git a/webrtc/video/video_capture_input.h b/webrtc/video/video_capture_input.h
index 5a86ad265f..d44907cd0e 100644
--- a/webrtc/video/video_capture_input.h
+++ b/webrtc/video/video_capture_input.h
@@ -14,18 +14,18 @@
#include <vector>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/modules/video_capture/video_capture.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/typedefs.h"
-#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
@@ -34,7 +34,6 @@ class Config;
class CpuOveruseMetricsObserver;
class CpuOveruseObserver;
class CriticalSectionWrapper;
-class EventWrapper;
class OveruseFrameDetector;
class ProcessThread;
class RegistrableCpuOveruseMetricsObserver;
@@ -66,8 +65,6 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
static bool EncoderThreadFunction(void* obj);
bool EncoderProcess();
- void DeliverI420Frame(VideoFrame* video_frame);
-
rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
ProcessThread* const module_process_thread_;
@@ -79,8 +76,8 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
rtc::scoped_ptr<CriticalSectionWrapper> incoming_frame_cs_;
VideoFrame incoming_frame_;
- rtc::scoped_ptr<ThreadWrapper> encoder_thread_;
- rtc::scoped_ptr<EventWrapper> capture_event_;
+ rtc::PlatformThread encoder_thread_;
+ rtc::Event capture_event_;
volatile int stop_;
diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
index e8bc2ad1c9..9d720e2294 100644
--- a/webrtc/video/video_capture_input_unittest.cc
+++ b/webrtc/video/video_capture_input_unittest.cc
@@ -13,11 +13,11 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/event.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common.h"
-#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
#include "webrtc/test/fake_texture_frame.h"
@@ -51,9 +51,10 @@ class VideoCaptureInputTest : public ::testing::Test {
VideoCaptureInputTest()
: mock_process_thread_(new NiceMock<MockProcessThread>),
mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
- output_frame_event_(EventWrapper::Create()),
+ output_frame_event_(false, false),
stats_proxy_(Clock::GetRealTimeClock(),
- webrtc::VideoSendStream::Config(nullptr)) {}
+ webrtc::VideoSendStream::Config(nullptr),
+ webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo) {}
virtual void SetUp() {
EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_))
@@ -81,11 +82,11 @@ class VideoCaptureInputTest : public ::testing::Test {
if (frame.native_handle() == NULL)
output_frame_ybuffers_.push_back(frame.buffer(kYPlane));
output_frames_.push_back(new VideoFrame(frame));
- output_frame_event_->Set();
+ output_frame_event_.Set();
}
void WaitOutputFrame() {
- EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_TRUE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
}
rtc::scoped_ptr<MockProcessThread> mock_process_thread_;
@@ -98,7 +99,7 @@ class VideoCaptureInputTest : public ::testing::Test {
ScopedVector<VideoFrame> input_frames_;
// Indicate an output frame has arrived.
- rtc::scoped_ptr<EventWrapper> output_frame_event_;
+ rtc::Event output_frame_event_;
// Output delivered frames of VideoCaptureInput.
ScopedVector<VideoFrame> output_frames_;
@@ -111,20 +112,19 @@ class VideoCaptureInputTest : public ::testing::Test {
TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
// Indicate an output frame has arrived.
- rtc::scoped_ptr<EventWrapper> frame_destroyed_event(EventWrapper::Create());
+ rtc::Event frame_destroyed_event(false, false);
class TestBuffer : public webrtc::I420Buffer {
public:
- explicit TestBuffer(EventWrapper* event)
- : I420Buffer(5, 5), event_(event) {}
+ explicit TestBuffer(rtc::Event* event) : I420Buffer(5, 5), event_(event) {}
private:
friend class rtc::RefCountedObject<TestBuffer>;
~TestBuffer() override { event_->Set(); }
- EventWrapper* event_;
+ rtc::Event* const event_;
};
VideoFrame frame(
- new rtc::RefCountedObject<TestBuffer>(frame_destroyed_event.get()), 1, 1,
+ new rtc::RefCountedObject<TestBuffer>(&frame_destroyed_event), 1, 1,
kVideoRotation_0);
AddInputFrame(&frame);
@@ -134,7 +134,7 @@ TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
frame.video_frame_buffer().get());
output_frames_.clear();
frame.Reset();
- EXPECT_EQ(kEventSignaled, frame_destroyed_event->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_TRUE(frame_destroyed_event.Wait(FRAME_TIMEOUT_MS));
}
TEST_F(VideoCaptureInputTest, TestNtpTimeStampSetIfRenderTimeSet) {
@@ -171,12 +171,12 @@ TEST_F(VideoCaptureInputTest, DropsFramesWithSameOrOldNtpTimestamp) {
// Repeat frame with the same NTP timestamp should drop.
AddInputFrame(input_frames_[0]);
- EXPECT_EQ(kEventTimeout, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
// As should frames with a decreased NTP timestamp.
input_frames_[0]->set_ntp_time_ms(input_frames_[0]->ntp_time_ms() - 1);
AddInputFrame(input_frames_[0]);
- EXPECT_EQ(kEventTimeout, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
// But delivering with an increased NTP timestamp should succeed.
input_frames_[0]->set_ntp_time_ms(4711);
@@ -191,7 +191,7 @@ TEST_F(VideoCaptureInputTest, TestTextureFrames) {
for (int i = 0 ; i < kNumFrame; ++i) {
test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
// Add one to |i| so that width/height > 0.
- input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
+ input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
dummy_handle, i + 1, i + 1, i + 1, i + 1, webrtc::kVideoRotation_0)));
AddInputFrame(input_frames_[i]);
WaitOutputFrame();
@@ -220,7 +220,7 @@ TEST_F(VideoCaptureInputTest, TestI420Frames) {
TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
- input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
+ input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)));
AddInputFrame(input_frames_[0]);
WaitOutputFrame();
@@ -239,7 +239,7 @@ TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
WaitOutputFrame();
test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
- input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
+ input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)));
AddInputFrame(input_frames_[1]);
WaitOutputFrame();
diff --git a/webrtc/video/video_decoder.cc b/webrtc/video/video_decoder.cc
index fa1f2ee878..d699175274 100644
--- a/webrtc/video/video_decoder.cc
+++ b/webrtc/video/video_decoder.cc
@@ -76,6 +76,9 @@ bool VideoDecoderSoftwareFallbackWrapper::InitFallbackDecoder() {
}
if (callback_ != nullptr)
fallback_decoder_->RegisterDecodeCompleteCallback(callback_);
+ fallback_implementation_name_ =
+ std::string(fallback_decoder_->ImplementationName()) +
+ " (fallback from: " + decoder_->ImplementationName() + ")";
return true;
}
@@ -131,4 +134,16 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Reset() {
return decoder_->Reset();
}
+bool VideoDecoderSoftwareFallbackWrapper::PrefersLateDecoding() const {
+ if (fallback_decoder_)
+ return fallback_decoder_->PrefersLateDecoding();
+ return decoder_->PrefersLateDecoding();
+}
+
+const char* VideoDecoderSoftwareFallbackWrapper::ImplementationName() const {
+ if (fallback_decoder_)
+ return fallback_implementation_name_.c_str();
+ return decoder_->ImplementationName();
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_decoder_unittest.cc b/webrtc/video/video_decoder_unittest.cc
index be09b191ac..4d54a3e53f 100644
--- a/webrtc/video/video_decoder_unittest.cc
+++ b/webrtc/video/video_decoder_unittest.cc
@@ -11,7 +11,8 @@
#include "webrtc/video_decoder.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
namespace webrtc {
@@ -52,6 +53,11 @@ class VideoDecoderSoftwareFallbackWrapperTest : public ::testing::Test {
++reset_count_;
return WEBRTC_VIDEO_CODEC_OK;
}
+
+ const char* ImplementationName() const override {
+ return "fake-decoder";
+ }
+
int init_decode_count_ = 0;
int decode_count_ = 0;
int32_t decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
@@ -143,11 +149,16 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest, ForwardsResetCall) {
}
// TODO(pbos): Fake a VP8 frame well enough to actually receive a callback from
-// the software encoder.
+// the software decoder.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
ForwardsRegisterDecodeCompleteCallback) {
class FakeDecodedImageCallback : public DecodedImageCallback {
int32_t Decoded(VideoFrame& decodedImage) override { return 0; }
+ int32_t Decoded(
+ webrtc::VideoFrame& decodedImage, int64_t decode_time_ms) override {
+ RTC_NOTREACHED();
+ return -1;
+ }
} callback, callback2;
VideoCodec codec = {};
@@ -162,4 +173,19 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
EXPECT_EQ(&callback2, fake_decoder_.decode_complete_callback_);
}
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ ReportsFallbackImplementationName) {
+ VideoCodec codec = {};
+ fallback_wrapper_.InitDecode(&codec, 2);
+
+ fake_decoder_.decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ EncodedImage encoded_image;
+ fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
+  // Hard-coded expected value since libvpx is the software implementation
+  // name for VP8. Update this if the underlying implementation changes.
+ EXPECT_STREQ("libvpx (fallback from: fake-decoder)",
+ fallback_wrapper_.ImplementationName());
+ fallback_wrapper_.Release();
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_encoder.cc b/webrtc/video/video_encoder.cc
index 6410e395fc..e85e3d97a7 100644
--- a/webrtc/video/video_encoder.cc
+++ b/webrtc/video/video_encoder.cc
@@ -76,6 +76,9 @@ bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder() {
if (channel_parameters_set_)
fallback_encoder_->SetChannelParameters(packet_loss_, rtt_);
+ fallback_implementation_name_ =
+ std::string(fallback_encoder_->ImplementationName()) +
+ " (fallback from: " + encoder_->ImplementationName() + ")";
// Since we're switching to the fallback encoder, Release the real encoder. It
// may be re-initialized via InitEncode later, and it will continue to get
// Set calls for rates and channel parameters in the meantime.
@@ -182,6 +185,12 @@ bool VideoEncoderSoftwareFallbackWrapper::SupportsNativeHandle() const {
return encoder_->SupportsNativeHandle();
}
+const char* VideoEncoderSoftwareFallbackWrapper::ImplementationName() const {
+ if (fallback_encoder_)
+ return fallback_implementation_name_.c_str();
+ return encoder_->ImplementationName();
+}
+
int VideoEncoderSoftwareFallbackWrapper::GetTargetFramerate() {
if (fallback_encoder_)
return fallback_encoder_->GetTargetFramerate();
diff --git a/webrtc/video/video_encoder_unittest.cc b/webrtc/video/video_encoder_unittest.cc
index 3382be83be..0f28f89163 100644
--- a/webrtc/video/video_encoder_unittest.cc
+++ b/webrtc/video/video_encoder_unittest.cc
@@ -11,7 +11,7 @@
#include "webrtc/video_encoder.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
namespace webrtc {
@@ -67,6 +67,10 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test {
return false;
}
+ const char* ImplementationName() const override {
+ return "fake-encoder";
+ }
+
int init_encode_count_ = 0;
int32_t init_encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
int32_t encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
@@ -259,4 +263,13 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
}
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ ReportsFallbackImplementationName) {
+ UtilizeFallbackEncoder();
+  // Hard-coded expected value since libvpx is the software implementation
+  // name for VP8. Update this if the underlying implementation changes.
+ EXPECT_STREQ("libvpx (fallback from: fake-encoder)",
+ fallback_wrapper_.ImplementationName());
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_loopback.cc b/webrtc/video/video_loopback.cc
index 0c06f85fcc..2338a84a43 100644
--- a/webrtc/video/video_loopback.cc
+++ b/webrtc/video/video_loopback.cc
@@ -20,6 +20,7 @@
namespace webrtc {
namespace flags {
+// Flags common with screenshare loopback, with different default values.
DEFINE_int32(width, 640, "Video width.");
size_t Width() {
return static_cast<size_t>(FLAGS_width);
@@ -55,11 +56,46 @@ int MaxBitrateKbps() {
return static_cast<int>(FLAGS_max_bitrate);
}
+DEFINE_int32(num_temporal_layers,
+ 1,
+ "Number of temporal layers. Set to 1-4 to override.");
+int NumTemporalLayers() {
+ return static_cast<int>(FLAGS_num_temporal_layers);
+}
+
+// Flags common with screenshare loopback, with the same default values.
DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAGS_codec);
}
+DEFINE_int32(selected_tl,
+ -1,
+ "Temporal layer to show or analyze. -1 to disable filtering.");
+int SelectedTL() {
+ return static_cast<int>(FLAGS_selected_tl);
+}
+
+DEFINE_int32(
+ duration,
+ 0,
+ "Duration of the test in seconds. If 0, rendered will be shown instead.");
+int DurationSecs() {
+ return static_cast<int>(FLAGS_duration);
+}
+
+DEFINE_string(output_filename, "", "Target graph data filename.");
+std::string OutputFilename() {
+ return static_cast<std::string>(FLAGS_output_filename);
+}
+
+DEFINE_string(graph_title,
+ "",
+ "If empty, title will be generated automatically.");
+std::string GraphTitle() {
+ return static_cast<std::string>(FLAGS_graph_title);
+}
+
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAGS_loss_percent);
@@ -91,8 +127,55 @@ int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
+DEFINE_int32(selected_stream, 0, "ID of the stream to show or analyze.");
+int SelectedStream() {
+ return static_cast<int>(FLAGS_selected_stream);
+}
+
+DEFINE_int32(num_spatial_layers, 1, "Number of spatial layers to use.");
+int NumSpatialLayers() {
+ return static_cast<int>(FLAGS_num_spatial_layers);
+}
+
+DEFINE_int32(selected_sl,
+ -1,
+ "Spatial layer to show or analyze. -1 to disable filtering.");
+int SelectedSL() {
+ return static_cast<int>(FLAGS_selected_sl);
+}
+
+DEFINE_string(stream0,
+ "",
+ "Comma separated values describing VideoStream for stream #0.");
+std::string Stream0() {
+ return static_cast<std::string>(FLAGS_stream0);
+}
+
+DEFINE_string(stream1,
+ "",
+ "Comma separated values describing VideoStream for stream #1.");
+std::string Stream1() {
+ return static_cast<std::string>(FLAGS_stream1);
+}
+
+DEFINE_string(sl0,
+ "",
+ "Comma separated values describing SpatialLayer for layer #0.");
+std::string SL0() {
+ return static_cast<std::string>(FLAGS_sl0);
+}
+
+DEFINE_string(sl1,
+ "",
+ "Comma separated values describing SpatialLayer for layer #1.");
+std::string SL1() {
+ return static_cast<std::string>(FLAGS_sl1);
+}
+
DEFINE_bool(logs, false, "print logs to stderr");
+DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
+
DEFINE_string(
force_fieldtrials,
"",
@@ -101,21 +184,7 @@ DEFINE_string(
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
-DEFINE_int32(num_temporal_layers,
- 1,
- "Number of temporal layers. Set to 1-4 to override.");
-size_t NumTemporalLayers() {
- return static_cast<size_t>(FLAGS_num_temporal_layers);
-}
-
-DEFINE_int32(
- tl_discard_threshold,
- 0,
- "Discard TLs with id greater or equal the threshold. 0 to disable.");
-size_t TLDiscardThreshold() {
- return static_cast<size_t>(FLAGS_tl_discard_threshold);
-}
-
+// Video-specific flags.
DEFINE_string(clip,
"",
"Name of the clip to show. If empty, using chroma generator.");
@@ -123,21 +192,6 @@ std::string Clip() {
return static_cast<std::string>(FLAGS_clip);
}
-DEFINE_string(
- output_filename,
- "",
- "Name of a target graph data file. If set, no preview will be shown.");
-std::string OutputFilename() {
- return static_cast<std::string>(FLAGS_output_filename);
-}
-
-DEFINE_int32(duration, 60, "Duration of the test in seconds.");
-int DurationSecs() {
- return static_cast<int>(FLAGS_duration);
-}
-
-DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
-
} // namespace flags
void Loopback() {
@@ -153,27 +207,36 @@ void Loopback() {
call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
call_bitrate_config.max_bitrate_bps = flags::MaxBitrateKbps() * 1000;
- std::string clip = flags::Clip();
- std::string graph_title = clip.empty() ? "" : "video " + clip;
VideoQualityTest::Params params{
{flags::Width(), flags::Height(), flags::Fps(),
flags::MinBitrateKbps() * 1000, flags::TargetBitrateKbps() * 1000,
flags::MaxBitrateKbps() * 1000, flags::Codec(),
- flags::NumTemporalLayers(),
+ flags::NumTemporalLayers(), flags::SelectedTL(),
0, // No min transmit bitrate.
- call_bitrate_config, flags::TLDiscardThreshold(),
- flags::FLAGS_send_side_bwe},
- {clip},
+ call_bitrate_config, flags::FLAGS_send_side_bwe},
+ {flags::Clip()},
{}, // Screenshare specific.
- {graph_title, 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename()},
+ {"video", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename(),
+ flags::GraphTitle()},
pipe_config,
flags::FLAGS_logs};
+ std::vector<std::string> stream_descriptors;
+ stream_descriptors.push_back(flags::Stream0());
+ stream_descriptors.push_back(flags::Stream1());
+ std::vector<std::string> SL_descriptors;
+ SL_descriptors.push_back(flags::SL0());
+ SL_descriptors.push_back(flags::SL1());
+ VideoQualityTest::FillScalabilitySettings(
+ &params, stream_descriptors, flags::SelectedStream(),
+ flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
+
VideoQualityTest test;
- if (flags::OutputFilename().empty())
- test.RunWithVideoRenderer(params);
- else
+ if (flags::DurationSecs()) {
test.RunWithAnalyzer(params);
+ } else {
+ test.RunWithVideoRenderer(params);
+ }
}
} // namespace webrtc
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 0f45fa6632..08ae0a9cee 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -12,16 +12,20 @@
#include <algorithm>
#include <deque>
#include <map>
+#include <sstream>
+#include <string>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/event.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
#include "webrtc/test/layer_filtering_transport.h"
#include "webrtc/test/run_loop.h"
@@ -43,18 +47,22 @@ class VideoAnalyzer : public PacketReceiver,
public EncodedFrameObserver,
public EncodingTimeObserver {
public:
- VideoAnalyzer(Transport* transport,
+ VideoAnalyzer(test::LayerFilteringTransport* transport,
const std::string& test_label,
double avg_psnr_threshold,
double avg_ssim_threshold,
int duration_frames,
- FILE* graph_data_output_file)
+ FILE* graph_data_output_file,
+ const std::string& graph_title,
+ uint32_t ssrc_to_analyze)
: input_(nullptr),
transport_(transport),
receiver_(nullptr),
send_stream_(nullptr),
test_label_(test_label),
graph_data_output_file_(graph_data_output_file),
+ graph_title_(graph_title),
+ ssrc_to_analyze_(ssrc_to_analyze),
frames_to_process_(duration_frames),
frames_recorded_(0),
frames_processed_(0),
@@ -63,8 +71,9 @@ class VideoAnalyzer : public PacketReceiver,
rtp_timestamp_delta_(0),
avg_psnr_threshold_(avg_psnr_threshold),
avg_ssim_threshold_(avg_ssim_threshold),
- comparison_available_event_(EventWrapper::Create()),
- done_(EventWrapper::Create()) {
+ stats_polling_thread_(&PollStatsThread, this, "StatsPoller"),
+ comparison_available_event_(false, false),
+ done_(false, false) {
// Create thread pool for CPU-expensive PSNR/SSIM calculations.
// Try to use about as many threads as cores, but leave kMinCoresLeft alone,
@@ -85,20 +94,16 @@ class VideoAnalyzer : public PacketReceiver,
}
for (uint32_t i = 0; i < num_cores; ++i) {
- rtc::scoped_ptr<ThreadWrapper> thread =
- ThreadWrapper::CreateThread(&FrameComparisonThread, this, "Analyzer");
- EXPECT_TRUE(thread->Start());
- comparison_thread_pool_.push_back(thread.release());
+ rtc::PlatformThread* thread =
+ new rtc::PlatformThread(&FrameComparisonThread, this, "Analyzer");
+ thread->Start();
+ comparison_thread_pool_.push_back(thread);
}
-
- stats_polling_thread_ =
- ThreadWrapper::CreateThread(&PollStatsThread, this, "StatsPoller");
- EXPECT_TRUE(stats_polling_thread_->Start());
}
~VideoAnalyzer() {
- for (ThreadWrapper* thread : comparison_thread_pool_) {
- EXPECT_TRUE(thread->Stop());
+ for (rtc::PlatformThread* thread : comparison_thread_pool_) {
+ thread->Stop();
delete thread;
}
}
@@ -109,9 +114,9 @@ class VideoAnalyzer : public PacketReceiver,
const uint8_t* packet,
size_t length,
const PacketTime& packet_time) override {
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RtpUtility::RtpHeaderParser parser(packet, length);
RTPHeader header;
- parser->Parse(packet, length, &header);
+ parser.Parse(&header);
{
rtc::CritScope lock(&crit_);
recv_times_[header.timestamp - rtp_timestamp_delta_] =
@@ -145,10 +150,13 @@ class VideoAnalyzer : public PacketReceiver,
bool SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) override {
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RtpUtility::RtpHeaderParser parser(packet, length);
RTPHeader header;
- parser->Parse(packet, length, &header);
+ parser.Parse(&header);
+ int64_t current_time =
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
+ bool result = transport_->SendRtp(packet, length, options);
{
rtc::CritScope lock(&crit_);
if (rtp_timestamp_delta_ == 0) {
@@ -156,13 +164,14 @@ class VideoAnalyzer : public PacketReceiver,
first_send_frame_.Reset();
}
uint32_t timestamp = header.timestamp - rtp_timestamp_delta_;
- send_times_[timestamp] =
- Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
- encoded_frame_sizes_[timestamp] +=
- length - (header.headerLength + header.paddingLength);
+ send_times_[timestamp] = current_time;
+ if (!transport_->DiscardedLastPacket() &&
+ header.ssrc == ssrc_to_analyze_) {
+ encoded_frame_sizes_[timestamp] +=
+ length - (header.headerLength + header.paddingLength);
+ }
}
-
- return transport_->SendRtp(packet, length, options);
+ return result;
}
bool SendRtcp(const uint8_t* packet, size_t length) override {
@@ -192,6 +201,11 @@ class VideoAnalyzer : public PacketReceiver,
VideoFrame reference_frame = frames_.front();
frames_.pop_front();
assert(!reference_frame.IsZeroSize());
+ if (send_timestamp == reference_frame.timestamp() - 1) {
+ // TODO(ivica): Make this work for > 2 streams.
+      // Look at rtp_sender.cc:RTPSender::BuildRTPHeader.
+ ++send_timestamp;
+ }
EXPECT_EQ(reference_frame.timestamp(), send_timestamp);
assert(reference_frame.timestamp() == send_timestamp);
@@ -207,11 +221,11 @@ class VideoAnalyzer : public PacketReceiver,
// at time-out check if frames_processed is going up. If so, give it more
// time, otherwise fail. Hopefully this will reduce test flakiness.
+ stats_polling_thread_.Start();
+
int last_frames_processed = -1;
- EventTypeWrapper eventType;
int iteration = 0;
- while ((eventType = done_->Wait(VideoQualityTest::kDefaultTimeoutMs)) !=
- kEventSignaled) {
+ while (!done_.Wait(VideoQualityTest::kDefaultTimeoutMs)) {
int frames_processed;
{
rtc::CritScope crit(&comparison_lock_);
@@ -240,12 +254,12 @@ class VideoAnalyzer : public PacketReceiver,
// Signal stats polling thread if that is still waiting and stop it now,
// since it uses the send_stream_ reference that might be reclaimed after
// returning from this method.
- done_->Set();
- EXPECT_TRUE(stats_polling_thread_->Stop());
+ done_.Set();
+ stats_polling_thread_.Stop();
}
VideoCaptureInput* input_;
- Transport* const transport_;
+ test::LayerFilteringTransport* const transport_;
PacketReceiver* receiver_;
VideoSendStream* send_stream_;
@@ -320,8 +334,13 @@ class VideoAnalyzer : public PacketReceiver,
int64_t recv_time_ms = recv_times_[reference.timestamp()];
recv_times_.erase(reference.timestamp());
- size_t encoded_size = encoded_frame_sizes_[reference.timestamp()];
- encoded_frame_sizes_.erase(reference.timestamp());
+ // TODO(ivica): Make this work for > 2 streams.
+ auto it = encoded_frame_sizes_.find(reference.timestamp());
+ if (it == encoded_frame_sizes_.end())
+ it = encoded_frame_sizes_.find(reference.timestamp() - 1);
+ size_t encoded_size = it == encoded_frame_sizes_.end() ? 0 : it->second;
+ if (it != encoded_frame_sizes_.end())
+ encoded_frame_sizes_.erase(it);
VideoFrame reference_copy;
VideoFrame render_copy;
@@ -332,7 +351,7 @@ class VideoAnalyzer : public PacketReceiver,
comparisons_.push_back(FrameComparison(reference_copy, render_copy, dropped,
send_time_ms, recv_time_ms,
render_time_ms, encoded_size));
- comparison_available_event_->Set();
+ comparison_available_event_.Set();
}
static bool PollStatsThread(void* obj) {
@@ -340,15 +359,11 @@ class VideoAnalyzer : public PacketReceiver,
}
bool PollStats() {
- switch (done_->Wait(kSendStatsPollingIntervalMs)) {
- case kEventSignaled:
- case kEventError:
- done_->Set(); // Make sure main thread is also signaled.
- return false;
- case kEventTimeout:
- break;
- default:
- RTC_NOTREACHED();
+ if (done_.Wait(kSendStatsPollingIntervalMs)) {
+ // Set event again to make sure main thread is also signaled, then we're
+ // done.
+ done_.Set();
+ return false;
}
VideoSendStream::Stats stats = send_stream_->GetStats();
@@ -377,9 +392,9 @@ class VideoAnalyzer : public PacketReceiver,
if (!PopComparison(&comparison)) {
// Wait until new comparison task is available, or test is done.
// If done, wake up remaining threads waiting.
- comparison_available_event_->Wait(1000);
+ comparison_available_event_.Wait(1000);
if (AllFramesRecorded()) {
- comparison_available_event_->Set();
+ comparison_available_event_.Set();
return false;
}
return true; // Try again.
@@ -391,8 +406,8 @@ class VideoAnalyzer : public PacketReceiver,
PrintResults();
if (graph_data_output_file_)
PrintSamplesToFile();
- done_->Set();
- comparison_available_event_->Set();
+ done_.Set();
+ comparison_available_event_.Set();
return false;
}
@@ -509,7 +524,7 @@ class VideoAnalyzer : public PacketReceiver,
return A.input_time_ms < B.input_time_ms;
});
- fprintf(out, "%s\n", test_label_.c_str());
+ fprintf(out, "%s\n", graph_title_.c_str());
fprintf(out, "%" PRIuS "\n", samples_.size());
fprintf(out,
"dropped "
@@ -547,6 +562,8 @@ class VideoAnalyzer : public PacketReceiver,
const std::string test_label_;
FILE* const graph_data_output_file_;
+ const std::string graph_title_;
+ const uint32_t ssrc_to_analyze_;
std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
std::map<int64_t, int> samples_encode_time_ms_ GUARDED_BY(comparison_lock_);
test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
@@ -579,104 +596,260 @@ class VideoAnalyzer : public PacketReceiver,
const double avg_ssim_threshold_;
rtc::CriticalSection comparison_lock_;
- std::vector<ThreadWrapper*> comparison_thread_pool_;
- rtc::scoped_ptr<ThreadWrapper> stats_polling_thread_;
- const rtc::scoped_ptr<EventWrapper> comparison_available_event_;
+ std::vector<rtc::PlatformThread*> comparison_thread_pool_;
+ rtc::PlatformThread stats_polling_thread_;
+ rtc::Event comparison_available_event_;
std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_);
- const rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
};
VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {}
-void VideoQualityTest::ValidateParams(const Params& params) {
- RTC_CHECK_GE(params.common.max_bitrate_bps, params.common.target_bitrate_bps);
- RTC_CHECK_GE(params.common.target_bitrate_bps, params.common.min_bitrate_bps);
- RTC_CHECK_LT(params.common.tl_discard_threshold,
- params.common.num_temporal_layers);
+void VideoQualityTest::TestBody() {}
+
+std::string VideoQualityTest::GenerateGraphTitle() const {
+ std::stringstream ss;
+ ss << params_.common.codec;
+ ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps";
+ ss << ", " << params_.common.fps << " FPS";
+ if (params_.screenshare.scroll_duration)
+ ss << ", " << params_.screenshare.scroll_duration << "s scroll";
+ if (params_.ss.streams.size() > 1)
+ ss << ", Stream #" << params_.ss.selected_stream;
+ if (params_.ss.num_spatial_layers > 1)
+ ss << ", Layer #" << params_.ss.selected_sl;
+ ss << ")";
+ return ss.str();
}
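// A sketch of the resulting title, assuming hypothetical parameters:
// codec "VP9", 300 kbps target bitrate, 30 fps, two simulcast streams with
// selected_stream == 0, no scrolling and a single spatial layer give
//   GenerateGraphTitle() == "VP9 (300kbps, 30 FPS, Stream #0)"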
-void VideoQualityTest::TestBody() {}
+void VideoQualityTest::CheckParams() {
+  // Add a default stream if none specified.
+ if (params_.ss.streams.empty())
+ params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_));
+ if (params_.ss.num_spatial_layers == 0)
+ params_.ss.num_spatial_layers = 1;
+
+ if (params_.pipe.loss_percent != 0 ||
+ params_.pipe.queue_length_packets != 0) {
+ // Since LayerFilteringTransport changes the sequence numbers, we can't
+    // use that feature with packet loss, since the NACK requests would end
+    // up retransmitting the wrong packets.
+ RTC_CHECK(params_.ss.selected_sl == -1 ||
+ params_.ss.selected_sl == params_.ss.num_spatial_layers - 1);
+ RTC_CHECK(params_.common.selected_tl == -1 ||
+ params_.common.selected_tl ==
+ params_.common.num_temporal_layers - 1);
+ }
+
+ // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it
+ // does in some parts of the code?
+ RTC_CHECK_GE(params_.common.max_bitrate_bps,
+ params_.common.target_bitrate_bps);
+ RTC_CHECK_GE(params_.common.target_bitrate_bps,
+ params_.common.min_bitrate_bps);
+ RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers);
+ RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size());
+ for (const VideoStream& stream : params_.ss.streams) {
+ RTC_CHECK_GE(stream.min_bitrate_bps, 0);
+ RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
+ RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
+ RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()),
+ params_.common.num_temporal_layers - 1);
+ }
+ // TODO(ivica): Should we check if the sum of all streams/layers is equal to
+  // the total bitrate? We have to update them anyway in case the bitrate
+  // estimator changes the total bitrate.
+ RTC_CHECK_GE(params_.ss.num_spatial_layers, 1);
+ RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers);
+ RTC_CHECK(params_.ss.spatial_layers.empty() ||
+ params_.ss.spatial_layers.size() ==
+ static_cast<size_t>(params_.ss.num_spatial_layers));
+ if (params_.common.codec == "VP8") {
+ RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1);
+ } else if (params_.common.codec == "VP9") {
+ RTC_CHECK_EQ(params_.ss.streams.size(), 1u);
+ }
+}
+
+// Static.
+std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
+ // Parse comma separated nonnegative integers, where some elements may be
+ // empty. The empty values are replaced with -1.
+  // E.g. "10,20,,30,40" --> {10, 20, -1, 30, 40}
+ // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
+ std::vector<int> result;
+ if (str.empty())
+ return result;
+
+ const char* p = str.c_str();
+ int value = -1;
+ int pos;
+ while (*p) {
+ if (*p == ',') {
+ result.push_back(value);
+ value = -1;
+ ++p;
+ continue;
+ }
+ RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
+ << "Unexpected non-number value.";
+ p += pos;
+ }
+ result.push_back(value);
+ return result;
+}
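// A minimal usage sketch of the rules above; the input string is
// hypothetical. Empty fields, including the one after a trailing comma,
// come back as -1:
std::vector<int> csv = VideoQualityTest::ParseCSV("1280,720,,300,500,");
// csv == {1280, 720, -1, 300, 500, -1}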
+
+// Static.
+VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) {
+ VideoStream stream;
+ stream.width = params.common.width;
+ stream.height = params.common.height;
+ stream.max_framerate = params.common.fps;
+ stream.min_bitrate_bps = params.common.min_bitrate_bps;
+ stream.target_bitrate_bps = params.common.target_bitrate_bps;
+ stream.max_bitrate_bps = params.common.max_bitrate_bps;
+ stream.max_qp = 52;
+ if (params.common.num_temporal_layers == 2)
+ stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps);
+ return stream;
+}
-void VideoQualityTest::SetupFullStack(const Params& params,
- Transport* send_transport,
- Transport* recv_transport) {
- if (params.logs)
+// Static.
+void VideoQualityTest::FillScalabilitySettings(
+ Params* params,
+ const std::vector<std::string>& stream_descriptors,
+ size_t selected_stream,
+ int num_spatial_layers,
+ int selected_sl,
+ const std::vector<std::string>& sl_descriptors) {
+ // Read VideoStream and SpatialLayer elements from a list of comma separated
+ // lists. To use a default value for an element, use -1 or leave empty.
+  // Validity checks are performed in CheckParams.
+
+ RTC_CHECK(params->ss.streams.empty());
+ for (auto descriptor : stream_descriptors) {
+ if (descriptor.empty())
+ continue;
+ VideoStream stream = VideoQualityTest::DefaultVideoStream(*params);
+ std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
+ if (v[0] != -1)
+ stream.width = static_cast<size_t>(v[0]);
+ if (v[1] != -1)
+ stream.height = static_cast<size_t>(v[1]);
+ if (v[2] != -1)
+ stream.max_framerate = v[2];
+ if (v[3] != -1)
+ stream.min_bitrate_bps = v[3];
+ if (v[4] != -1)
+ stream.target_bitrate_bps = v[4];
+ if (v[5] != -1)
+ stream.max_bitrate_bps = v[5];
+ if (v.size() > 6 && v[6] != -1)
+ stream.max_qp = v[6];
+ if (v.size() > 7) {
+ stream.temporal_layer_thresholds_bps.clear();
+ stream.temporal_layer_thresholds_bps.insert(
+ stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end());
+ } else {
+ // Automatic TL thresholds for more than two layers not supported.
+ RTC_CHECK_LE(params->common.num_temporal_layers, 2);
+ }
+ params->ss.streams.push_back(stream);
+ }
+ params->ss.selected_stream = selected_stream;
+
+ params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1;
+ params->ss.selected_sl = selected_sl;
+ RTC_CHECK(params->ss.spatial_layers.empty());
+ for (auto descriptor : sl_descriptors) {
+ if (descriptor.empty())
+ continue;
+ std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
+ RTC_CHECK_GT(v[2], 0);
+
+ SpatialLayer layer;
+ layer.scaling_factor_num = v[0] == -1 ? 1 : v[0];
+ layer.scaling_factor_den = v[1] == -1 ? 1 : v[1];
+ layer.target_bitrate_bps = v[2];
+ params->ss.spatial_layers.push_back(layer);
+ }
+}
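// A usage sketch of the descriptor scheme above; all numbers are
// hypothetical. Each stream descriptor lists
// width,height,fps,min,target,max[,qp[,tl_thresholds...]], where -1 or an
// empty field keeps the corresponding DefaultVideoStream() value:
VideoQualityTest::Params params;  // Assume params.common is already set.
std::vector<std::string> streams = {"320,180,,30000,80000,150000",
                                    "1280,720,30,400000,800000,1600000"};
VideoQualityTest::FillScalabilitySettings(
    &params, streams, /*selected_stream=*/1, /*num_spatial_layers=*/1,
    /*selected_sl=*/-1, /*sl_descriptors=*/std::vector<std::string>());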
+
+void VideoQualityTest::SetupCommon(Transport* send_transport,
+ Transport* recv_transport) {
+ if (params_.logs)
trace_to_stderr_.reset(new test::TraceToStderr);
- CreateSendConfig(1, send_transport);
+ size_t num_streams = params_.ss.streams.size();
+ CreateSendConfig(num_streams, 0, send_transport);
int payload_type;
- if (params.common.codec == "VP8") {
+ if (params_.common.codec == "VP8") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8));
payload_type = kPayloadTypeVP8;
- } else if (params.common.codec == "VP9") {
+ } else if (params_.common.codec == "VP9") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9));
payload_type = kPayloadTypeVP9;
} else {
RTC_NOTREACHED() << "Codec not supported!";
return;
}
- send_config_.encoder_settings.encoder = encoder_.get();
- send_config_.encoder_settings.payload_name = params.common.codec;
- send_config_.encoder_settings.payload_type = payload_type;
-
- send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
- send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
-
- send_config_.rtp.extensions.clear();
- if (params.common.send_side_bwe) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.encoder_settings.encoder = encoder_.get();
+ video_send_config_.encoder_settings.payload_name = params_.common.codec;
+ video_send_config_.encoder_settings.payload_type = payload_type;
+ video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ for (size_t i = 0; i < num_streams; ++i)
+ video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+
+ video_send_config_.rtp.extensions.clear();
+ if (params_.common.send_side_bwe) {
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumber,
test::kTransportSequenceNumberExtensionId));
} else {
- send_config_.rtp.extensions.push_back(RtpExtension(
+ video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
}
- // Automatically fill out streams[0] with params.
- VideoStream* stream = &encoder_config_.streams[0];
- stream->width = params.common.width;
- stream->height = params.common.height;
- stream->min_bitrate_bps = params.common.min_bitrate_bps;
- stream->target_bitrate_bps = params.common.target_bitrate_bps;
- stream->max_bitrate_bps = params.common.max_bitrate_bps;
- stream->max_framerate = static_cast<int>(params.common.fps);
-
- stream->temporal_layer_thresholds_bps.clear();
- if (params.common.num_temporal_layers > 1) {
- stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
- }
+ video_encoder_config_.min_transmit_bitrate_bps =
+ params_.common.min_transmit_bps;
+ video_encoder_config_.streams = params_.ss.streams;
+ video_encoder_config_.spatial_layers = params_.ss.spatial_layers;
CreateMatchingReceiveConfigs(recv_transport);
- receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- receive_configs_[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrcs[0];
- receive_configs_[0].rtp.rtx[kSendRtxPayloadType].payload_type =
- kSendRtxPayloadType;
-
- encoder_config_.min_transmit_bitrate_bps = params.common.min_transmit_bps;
+ for (size_t i = 0; i < num_streams; ++i) {
+ video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_receive_configs_[i].rtp.rtx[kSendRtxPayloadType].ssrc =
+ kSendRtxSsrcs[i];
+ video_receive_configs_[i].rtp.rtx[kSendRtxPayloadType].payload_type =
+ kSendRtxPayloadType;
+ video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe;
+ }
}
-void VideoQualityTest::SetupScreenshare(const Params& params) {
- RTC_CHECK(params.screenshare.enabled);
+void VideoQualityTest::SetupScreenshare() {
+ RTC_CHECK(params_.screenshare.enabled);
// Fill out codec settings.
- encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
- if (params.common.codec == "VP8") {
+ video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
+ if (params_.common.codec == "VP8") {
codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings();
codec_settings_.VP8.denoisingOn = false;
codec_settings_.VP8.frameDroppingOn = false;
codec_settings_.VP8.numberOfTemporalLayers =
- static_cast<unsigned char>(params.common.num_temporal_layers);
- encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
- } else if (params.common.codec == "VP9") {
+ static_cast<unsigned char>(params_.common.num_temporal_layers);
+ video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
+ } else if (params_.common.codec == "VP9") {
codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings();
codec_settings_.VP9.denoisingOn = false;
codec_settings_.VP9.frameDroppingOn = false;
codec_settings_.VP9.numberOfTemporalLayers =
- static_cast<unsigned char>(params.common.num_temporal_layers);
- encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
+ static_cast<unsigned char>(params_.common.num_temporal_layers);
+ video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
+ codec_settings_.VP9.numberOfSpatialLayers =
+ static_cast<unsigned char>(params_.ss.num_spatial_layers);
}
// Setup frame generator.
@@ -688,71 +861,67 @@ void VideoQualityTest::SetupScreenshare(const Params& params) {
slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
- if (params.screenshare.scroll_duration == 0) {
+ if (params_.screenshare.scroll_duration == 0) {
// Cycle image every slide_change_interval seconds.
frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
slides, kWidth, kHeight,
- params.screenshare.slide_change_interval * params.common.fps));
+ params_.screenshare.slide_change_interval * params_.common.fps));
} else {
- RTC_CHECK_LE(params.common.width, kWidth);
- RTC_CHECK_LE(params.common.height, kHeight);
- RTC_CHECK_GT(params.screenshare.slide_change_interval, 0);
- const int kPauseDurationMs = (params.screenshare.slide_change_interval -
- params.screenshare.scroll_duration) * 1000;
- RTC_CHECK_LE(params.screenshare.scroll_duration,
- params.screenshare.slide_change_interval);
-
- if (params.screenshare.scroll_duration) {
- frame_generator_.reset(
- test::FrameGenerator::CreateScrollingInputFromYuvFiles(
- clock_, slides, kWidth, kHeight, params.common.width,
- params.common.height, params.screenshare.scroll_duration * 1000,
- kPauseDurationMs));
- } else {
- frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
- slides, kWidth, kHeight,
- params.screenshare.slide_change_interval * params.common.fps));
- }
+ RTC_CHECK_LE(params_.common.width, kWidth);
+ RTC_CHECK_LE(params_.common.height, kHeight);
+ RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
+ const int kPauseDurationMs = (params_.screenshare.slide_change_interval -
+ params_.screenshare.scroll_duration) *
+ 1000;
+ RTC_CHECK_LE(params_.screenshare.scroll_duration,
+ params_.screenshare.slide_change_interval);
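    // For example (hypothetical settings): slide_change_interval == 10 s with
    // scroll_duration == 2 s gives kPauseDurationMs == (10 - 2) * 1000 == 8000,
    // i.e. each slide scrolls for two seconds and then holds still for eight.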
+
+ frame_generator_.reset(
+ test::FrameGenerator::CreateScrollingInputFromYuvFiles(
+ clock_, slides, kWidth, kHeight, params_.common.width,
+ params_.common.height, params_.screenshare.scroll_duration * 1000,
+ kPauseDurationMs));
}
}
-void VideoQualityTest::CreateCapturer(const Params& params,
- VideoCaptureInput* input) {
- if (params.screenshare.enabled) {
- test::FrameGeneratorCapturer *frame_generator_capturer =
+void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) {
+ if (params_.screenshare.enabled) {
+ test::FrameGeneratorCapturer* frame_generator_capturer =
new test::FrameGeneratorCapturer(
- clock_, input, frame_generator_.release(), params.common.fps);
+ clock_, input, frame_generator_.release(), params_.common.fps);
EXPECT_TRUE(frame_generator_capturer->Init());
capturer_.reset(frame_generator_capturer);
} else {
- if (params.video.clip_name.empty()) {
- capturer_.reset(test::VideoCapturer::Create(
- input, params.common.width, params.common.height, params.common.fps,
- clock_));
+ if (params_.video.clip_name.empty()) {
+ capturer_.reset(test::VideoCapturer::Create(input, params_.common.width,
+ params_.common.height,
+ params_.common.fps, clock_));
} else {
capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
- input, test::ResourcePath(params.video.clip_name, "yuv"),
- params.common.width, params.common.height, params.common.fps,
+ input, test::ResourcePath(params_.video.clip_name, "yuv"),
+ params_.common.width, params_.common.height, params_.common.fps,
clock_));
ASSERT_TRUE(capturer_.get() != nullptr)
- << "Could not create capturer for " << params.video.clip_name
+ << "Could not create capturer for " << params_.video.clip_name
<< ".yuv. Is this resource file present?";
}
}
}
void VideoQualityTest::RunWithAnalyzer(const Params& params) {
+ params_ = params;
+
+  // TODO(ivica): Merge with RunWithVideoRenderer and use a flag / argument
+  // to differentiate between the analyzer and the renderer case.
- ValidateParams(params);
+ CheckParams();
FILE* graph_data_output_file = nullptr;
- if (!params.analyzer.graph_data_output_filename.empty()) {
+ if (!params_.analyzer.graph_data_output_filename.empty()) {
graph_data_output_file =
- fopen(params.analyzer.graph_data_output_filename.c_str(), "w");
+ fopen(params_.analyzer.graph_data_output_filename.c_str(), "w");
RTC_CHECK(graph_data_output_file != nullptr)
- << "Can't open the file "
- << params.analyzer.graph_data_output_filename << "!";
+ << "Can't open the file " << params_.analyzer.graph_data_output_filename
+ << "!";
}
Call::Config call_config;
@@ -761,37 +930,64 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
test::LayerFilteringTransport send_transport(
params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
- static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
+ params.common.selected_tl, params_.ss.selected_sl);
test::DirectTransport recv_transport(params.pipe, receiver_call_.get());
+ std::string graph_title = params_.analyzer.graph_title;
+ if (graph_title.empty())
+ graph_title = VideoQualityTest::GenerateGraphTitle();
+
+ // In the case of different resolutions, the functions calculating PSNR and
+ // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer
+ // aborts if the average psnr/ssim are below the given threshold, which is
+ // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary
+ // abort.
+ VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream];
+ int selected_sl = params_.ss.selected_sl != -1
+ ? params_.ss.selected_sl
+ : params_.ss.num_spatial_layers - 1;
+ bool disable_quality_check =
+ selected_stream.width != params_.common.width ||
+ selected_stream.height != params_.common.height ||
+ (!params_.ss.spatial_layers.empty() &&
+ params_.ss.spatial_layers[selected_sl].scaling_factor_num !=
+ params_.ss.spatial_layers[selected_sl].scaling_factor_den);
+ if (disable_quality_check) {
+ fprintf(stderr,
+ "Warning: Calculating PSNR and SSIM for downsized resolution "
+ "not implemented yet! Skipping PSNR and SSIM calculations!");
+ }
+
VideoAnalyzer analyzer(
- &send_transport, params.analyzer.test_label,
- params.analyzer.avg_psnr_threshold, params.analyzer.avg_ssim_threshold,
- params.analyzer.test_durations_secs * params.common.fps,
- graph_data_output_file);
+ &send_transport, params_.analyzer.test_label,
+ disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold,
+ disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold,
+ params_.analyzer.test_durations_secs * params_.common.fps,
+ graph_data_output_file, graph_title,
+ kVideoSendSsrcs[params_.ss.selected_stream]);
analyzer.SetReceiver(receiver_call_->Receiver());
send_transport.SetReceiver(&analyzer);
recv_transport.SetReceiver(sender_call_->Receiver());
- SetupFullStack(params, &analyzer, &recv_transport);
- send_config_.encoding_time_observer = &analyzer;
- receive_configs_[0].renderer = &analyzer;
- for (auto& config : receive_configs_)
+ SetupCommon(&analyzer, &recv_transport);
+ video_send_config_.encoding_time_observer = &analyzer;
+ video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer;
+ for (auto& config : video_receive_configs_)
config.pre_decode_callback = &analyzer;
- if (params.screenshare.enabled)
- SetupScreenshare(params);
+ if (params_.screenshare.enabled)
+ SetupScreenshare();
- CreateStreams();
- analyzer.input_ = send_stream_->Input();
- analyzer.send_stream_ = send_stream_;
+ CreateVideoStreams();
+ analyzer.input_ = video_send_stream_->Input();
+ analyzer.send_stream_ = video_send_stream_;
- CreateCapturer(params, &analyzer);
+ CreateCapturer(&analyzer);
- send_stream_->Start();
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receive_streams_[i]->Start();
+ video_send_stream_->Start();
+ for (VideoReceiveStream* receive_stream : video_receive_streams_)
+ receive_stream->Start();
capturer_->Start();
analyzer.Wait();
@@ -800,9 +996,9 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
recv_transport.StopSending();
capturer_->Stop();
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receive_streams_[i]->Stop();
- send_stream_->Stop();
+ for (VideoReceiveStream* receive_stream : video_receive_streams_)
+ receive_stream->Stop();
+ video_send_stream_->Stop();
DestroyStreams();
@@ -811,53 +1007,65 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
}
void VideoQualityTest::RunWithVideoRenderer(const Params& params) {
- ValidateParams(params);
+ params_ = params;
+ CheckParams();
rtc::scoped_ptr<test::VideoRenderer> local_preview(
- test::VideoRenderer::Create("Local Preview", params.common.width,
- params.common.height));
+ test::VideoRenderer::Create("Local Preview", params_.common.width,
+ params_.common.height));
+ size_t stream_id = params_.ss.selected_stream;
+ std::string title = "Loopback Video";
+ if (params_.ss.streams.size() > 1) {
+ std::ostringstream s;
+ s << stream_id;
+ title += " - Stream #" + s.str();
+ }
+
rtc::scoped_ptr<test::VideoRenderer> loopback_video(
- test::VideoRenderer::Create("Loopback Video", params.common.width,
- params.common.height));
+ test::VideoRenderer::Create(title.c_str(),
+ params_.ss.streams[stream_id].width,
+ params_.ss.streams[stream_id].height));
// TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
// match the full stack tests.
Call::Config call_config;
- call_config.bitrate_config = params.common.call_bitrate_config;
+ call_config.bitrate_config = params_.common.call_bitrate_config;
rtc::scoped_ptr<Call> call(Call::Create(call_config));
test::LayerFilteringTransport transport(
params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9,
- static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
+ params.common.selected_tl, params_.ss.selected_sl);
// TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
// least share as much code as possible. That way this test would also match
// the full stack tests better.
transport.SetReceiver(call->Receiver());
- SetupFullStack(params, &transport, &transport);
- send_config_.local_renderer = local_preview.get();
- receive_configs_[0].renderer = loopback_video.get();
+ SetupCommon(&transport, &transport);
+
+ video_send_config_.local_renderer = local_preview.get();
+ video_receive_configs_[stream_id].renderer = loopback_video.get();
- if (params.screenshare.enabled)
- SetupScreenshare(params);
+ if (params_.screenshare.enabled)
+ SetupScreenshare();
- send_stream_ = call->CreateVideoSendStream(send_config_, encoder_config_);
+ video_send_stream_ =
+ call->CreateVideoSendStream(video_send_config_, video_encoder_config_);
VideoReceiveStream* receive_stream =
- call->CreateVideoReceiveStream(receive_configs_[0]);
- CreateCapturer(params, send_stream_->Input());
+ call->CreateVideoReceiveStream(video_receive_configs_[stream_id]);
+ CreateCapturer(video_send_stream_->Input());
receive_stream->Start();
- send_stream_->Start();
+ video_send_stream_->Start();
capturer_->Start();
test::PressEnterToContinue();
capturer_->Stop();
- send_stream_->Stop();
+ video_send_stream_->Stop();
receive_stream->Stop();
call->DestroyVideoReceiveStream(receive_stream);
- call->DestroyVideoSendStream(send_stream_);
+ call->DestroyVideoSendStream(video_send_stream_);
transport.StopSending();
}
diff --git a/webrtc/video/video_quality_test.h b/webrtc/video/video_quality_test.h
index 7b62fb3dce..dd2b011cc3 100644
--- a/webrtc/video/video_quality_test.h
+++ b/webrtc/video/video_quality_test.h
@@ -11,6 +11,7 @@
#define WEBRTC_VIDEO_VIDEO_QUALITY_TEST_H_
#include <string>
+#include <vector>
#include "webrtc/test/call_test.h"
#include "webrtc/test/frame_generator.h"
@@ -33,11 +34,11 @@ class VideoQualityTest : public test::CallTest {
int target_bitrate_bps;
int max_bitrate_bps;
std::string codec;
- size_t num_temporal_layers;
+ int num_temporal_layers;
+ int selected_tl;
int min_transmit_bps;
Call::Config::BitrateConfig call_bitrate_config;
- size_t tl_discard_threshold;
bool send_side_bwe;
} common;
struct { // Video-specific settings.
@@ -50,30 +51,56 @@ class VideoQualityTest : public test::CallTest {
} screenshare;
struct { // Analyzer settings.
std::string test_label;
- double avg_psnr_threshold;
- double avg_ssim_threshold;
+ double avg_psnr_threshold; // (*)
+ double avg_ssim_threshold; // (*)
int test_durations_secs;
std::string graph_data_output_filename;
+ std::string graph_title;
} analyzer;
FakeNetworkPipe::Config pipe;
bool logs;
+ struct { // Spatial scalability.
+ std::vector<VideoStream> streams; // If empty, one stream is assumed.
+ size_t selected_stream;
+ int num_spatial_layers;
+ int selected_sl;
+ // If empty, bitrates are generated in VP9Impl automatically.
+ std::vector<SpatialLayer> spatial_layers;
+ } ss;
};
+ // (*) Set to -1.1 if generating graph data for simulcast or SVC and the
+ // selected stream/layer doesn't have the same resolution as the largest
+ // stream/layer (to ignore the PSNR and SSIM calculation errors).
VideoQualityTest();
void RunWithAnalyzer(const Params& params);
void RunWithVideoRenderer(const Params& params);
+ static void FillScalabilitySettings(
+ Params* params,
+ const std::vector<std::string>& stream_descriptors,
+ size_t selected_stream,
+ int num_spatial_layers,
+ int selected_sl,
+ const std::vector<std::string>& sl_descriptors);
+
protected:
// No-op implementation to be able to instantiate this class from non-TEST_F
// locations.
void TestBody() override;
- void CreateCapturer(const Params& params, VideoCaptureInput* input);
- void ValidateParams(const Params& params);
- void SetupFullStack(const Params& params,
- Transport* send_transport,
- Transport* recv_transport);
- void SetupScreenshare(const Params& params);
+ // Helper methods accessing only params_.
+ std::string GenerateGraphTitle() const;
+ void CheckParams();
+
+ // Helper static methods.
+ static VideoStream DefaultVideoStream(const Params& params);
+ static std::vector<int> ParseCSV(const std::string& str);
+
+ // Helper methods for setting up the call.
+ void CreateCapturer(VideoCaptureInput* input);
+ void SetupCommon(Transport* send_transport, Transport* recv_transport);
+ void SetupScreenshare();
// We need a more general capturer than the FrameGeneratorCapturer.
rtc::scoped_ptr<test::VideoCapturer> capturer_;
@@ -82,6 +109,8 @@ class VideoQualityTest : public test::CallTest {
rtc::scoped_ptr<VideoEncoder> encoder_;
VideoCodecUnion codec_settings_;
Clock* const clock_;
+
+ Params params_;
};
} // namespace webrtc
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
index f5cb357098..7779fddd56 100644
--- a/webrtc/video/video_receive_stream.cc
+++ b/webrtc/video/video_receive_stream.cc
@@ -12,6 +12,7 @@
#include <stdlib.h>
+#include <set>
#include <string>
#include "webrtc/base/checks.h"
@@ -19,8 +20,8 @@
#include "webrtc/call/congestion_controller.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/video/call_stats.h"
#include "webrtc/video/receive_statistics_proxy.h"
-#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
@@ -38,8 +39,6 @@ std::string VideoReceiveStream::Decoder::ToString() const {
ss << "{decoder: " << (decoder != nullptr ? "(VideoDecoder)" : "nullptr");
ss << ", payload_type: " << payload_type;
ss << ", payload_name: " << payload_name;
- ss << ", is_renderer: " << (is_renderer ? "yes" : "no");
- ss << ", expected_delay_ms: " << expected_delay_ms;
ss << '}';
return ss.str();
@@ -81,6 +80,7 @@ std::string VideoReceiveStream::Config::Rtp::ToString() const {
<< (rtcp_xr.receiver_reference_time_report ? "on" : "off");
ss << '}';
ss << ", remb: " << (remb ? "on" : "off");
+ ss << ", transport_cc: " << (transport_cc ? "on" : "off");
ss << ", nack: {rtp_history_ms: " << nack.rtp_history_ms << '}';
ss << ", fec: " << fec.ToString();
ss << ", rtx: {";
@@ -110,7 +110,7 @@ VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) {
memset(&codec, 0, sizeof(codec));
codec.plType = decoder.payload_type;
- strcpy(codec.plName, decoder.payload_name.c_str());
+ strncpy(codec.plName, decoder.payload_name.c_str(), sizeof(codec.plName));
if (decoder.payload_name == "VP8") {
codec.codecType = kVideoCodecVP8;
} else if (decoder.payload_name == "VP9") {
@@ -153,16 +153,15 @@ VideoReceiveStream::VideoReceiveStream(
call_stats_(call_stats) {
LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString();
- bool send_side_bwe = UseSendSideBwe(config_.rtp.extensions);
+ bool send_side_bwe =
+ config.rtp.transport_cc && UseSendSideBwe(config_.rtp.extensions);
RemoteBitrateEstimator* bitrate_estimator =
congestion_controller_->GetRemoteBitrateEstimator(send_side_bwe);
vie_channel_.reset(new ViEChannel(
num_cpu_cores, &transport_adapter_, process_thread, nullptr,
- congestion_controller_->GetBitrateController()->
- CreateRtcpBandwidthObserver(),
- nullptr, bitrate_estimator, call_stats_->rtcp_rtt_stats(),
+ nullptr, nullptr, bitrate_estimator, call_stats_->rtcp_rtt_stats(),
congestion_controller_->pacer(), congestion_controller_->packet_router(),
1, false));
@@ -228,7 +227,7 @@ VideoReceiveStream::VideoReceiveStream(
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.codecType = kVideoCodecULPFEC;
- strcpy(codec.plName, "ulpfec");
+ strncpy(codec.plName, "ulpfec", sizeof(codec.plName));
codec.plType = config_.rtp.fec.ulpfec_payload_type;
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
}
@@ -236,7 +235,7 @@ VideoReceiveStream::VideoReceiveStream(
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.codecType = kVideoCodecRED;
- strcpy(codec.plName, "red");
+ strncpy(codec.plName, "red", sizeof(codec.plName));
codec.plType = config_.rtp.fec.red_payload_type;
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
if (config_.rtp.fec.red_rtx_payload_type != -1) {
@@ -259,21 +258,27 @@ VideoReceiveStream::VideoReceiveStream(
vie_channel_->RegisterRtcpPacketTypeCounterObserver(stats_proxy_.get());
RTC_DCHECK(!config_.decoders.empty());
+ std::set<int> decoder_payload_types;
for (size_t i = 0; i < config_.decoders.size(); ++i) {
const Decoder& decoder = config_.decoders[i];
- RTC_CHECK_EQ(0,
- vie_channel_->RegisterExternalDecoder(
- decoder.payload_type, decoder.decoder, decoder.is_renderer,
- decoder.is_renderer ? decoder.expected_delay_ms
- : config.render_delay_ms));
+ RTC_CHECK(decoder.decoder);
+ RTC_CHECK(decoder_payload_types.find(decoder.payload_type) ==
+ decoder_payload_types.end())
+ << "Duplicate payload type (" << decoder.payload_type
+ << ") for different decoders.";
+ decoder_payload_types.insert(decoder.payload_type);
+ vie_channel_->RegisterExternalDecoder(decoder.payload_type,
+ decoder.decoder);
VideoCodec codec = CreateDecoderVideoCodec(decoder);
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
}
- incoming_video_stream_.reset(new IncomingVideoStream(0));
+ incoming_video_stream_.reset(new IncomingVideoStream(
+ 0, config.renderer ? config.renderer->SmoothsRenderedFrames() : false));
incoming_video_stream_->SetExpectedRenderDelay(config.render_delay_ms);
+ vie_channel_->SetExpectedRenderDelay(config.render_delay_ms);
incoming_video_stream_->SetExternalCallback(this);
vie_channel_->SetIncomingVideoStream(incoming_video_stream_.get());
@@ -287,9 +292,6 @@ VideoReceiveStream::~VideoReceiveStream() {
vie_channel_->RegisterPreRenderCallback(nullptr);
vie_channel_->RegisterPreDecodeImageCallback(nullptr);
- for (size_t i = 0; i < config_.decoders.size(); ++i)
- vie_channel_->DeRegisterExternalDecoder(config_.decoders[i].payload_type);
-
call_stats_->DeregisterStatsObserver(vie_channel_->GetStatsObserver());
congestion_controller_->SetChannelRembStatus(false, false,
vie_channel_->rtp_rtcp());
diff --git a/webrtc/video/video_receive_stream.h b/webrtc/video/video_receive_stream.h
index 7b1f5394f1..0ff5269bb5 100644
--- a/webrtc/video/video_receive_stream.h
+++ b/webrtc/video/video_receive_stream.h
@@ -16,15 +16,15 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
-#include "webrtc/common_video/interface/incoming_video_stream.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video/vie_channel.h"
+#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_encoder.h"
-#include "webrtc/video_engine/vie_channel.h"
-#include "webrtc/video_engine/vie_encoder.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 4ec923f788..656d551794 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -20,19 +20,18 @@
#include "webrtc/base/trace_event.h"
#include "webrtc/call/congestion_controller.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/video/call_stats.h"
+#include "webrtc/video/encoder_state_feedback.h"
+#include "webrtc/video/payload_router.h"
#include "webrtc/video/video_capture_input.h"
-#include "webrtc/video_engine/call_stats.h"
-#include "webrtc/video_engine/encoder_state_feedback.h"
-#include "webrtc/video_engine/payload_router.h"
-#include "webrtc/video_engine/vie_channel.h"
-#include "webrtc/video_engine/vie_defines.h"
-#include "webrtc/video_engine/vie_encoder.h"
+#include "webrtc/video/vie_channel.h"
+#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
-class BitrateAllocator;
class PacedSender;
class RtcpIntraFrameObserver;
class TransportFeedbackObserver;
@@ -98,7 +97,7 @@ std::string VideoSendStream::Config::ToString() const {
ss << ", post_encode_callback: " << (post_encode_callback != nullptr
? "(EncodedFrameObserver)"
: "nullptr");
- ss << "local_renderer: " << (local_renderer != nullptr ? "(VideoRenderer)"
+ ss << ", local_renderer: " << (local_renderer != nullptr ? "(VideoRenderer)"
: "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
ss << ", target_delay_ms: " << target_delay_ms;
@@ -114,10 +113,14 @@ VideoSendStream::VideoSendStream(
ProcessThread* module_process_thread,
CallStats* call_stats,
CongestionController* congestion_controller,
+ BitrateAllocator* bitrate_allocator,
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config,
const std::map<uint32_t, RtpState>& suspended_ssrcs)
- : transport_adapter_(config.send_transport),
+ : stats_proxy_(Clock::GetRealTimeClock(),
+ config,
+ encoder_config.content_type),
+ transport_adapter_(config.send_transport),
encoded_frame_proxy_(config.post_encode_callback),
config_(config),
suspended_ssrcs_(suspended_ssrcs),
@@ -125,8 +128,7 @@ VideoSendStream::VideoSendStream(
call_stats_(call_stats),
congestion_controller_(congestion_controller),
encoder_feedback_(new EncoderStateFeedback()),
- use_config_bitrate_(true),
- stats_proxy_(Clock::GetRealTimeClock(), config) {
+ use_config_bitrate_(true) {
LOG(LS_INFO) << "VideoSendStream: " << config_.ToString();
RTC_DCHECK(!config_.rtp.ssrcs.empty());
@@ -145,7 +147,7 @@ VideoSendStream::VideoSendStream(
vie_encoder_.reset(new ViEEncoder(
num_cpu_cores, module_process_thread_, &stats_proxy_,
config.pre_encode_callback, congestion_controller_->pacer(),
- congestion_controller_->bitrate_allocator()));
+ bitrate_allocator));
RTC_CHECK(vie_encoder_->Init());
vie_channel_.reset(new ViEChannel(
@@ -197,8 +199,8 @@ VideoSendStream::VideoSendStream(
vie_channel_->SetProtectionMode(enable_protection_nack, enable_protection_fec,
config_.rtp.fec.red_payload_type,
config_.rtp.fec.ulpfec_payload_type);
- vie_encoder_->UpdateProtectionMethod(enable_protection_nack,
- enable_protection_fec);
+ vie_encoder_->SetProtectionMethod(enable_protection_nack,
+ enable_protection_fec);
ConfigureSsrcs();
@@ -346,6 +348,12 @@ bool VideoSendStream::ReconfigureVideoEncoder(
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
config.encoder_specific_settings);
+ if (video_codec.mode == kScreensharing) {
+ video_codec.codecSpecific.VP9.flexibleMode = true;
+      // For now VP9 screensharing uses 1 temporal and 2 spatial layers.
+ RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, 1);
+ RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
+ }
}
video_codec.codecSpecific.VP9.numberOfTemporalLayers =
static_cast<unsigned char>(
@@ -370,6 +378,16 @@ bool VideoSendStream::ReconfigureVideoEncoder(
static_cast<unsigned char>(streams.size());
video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
+ if (video_codec.codecType == kVideoCodecVP9) {
+ // If the vector is empty, bitrates will be configured automatically.
+ RTC_DCHECK(config.spatial_layers.empty() ||
+ config.spatial_layers.size() ==
+ video_codec.codecSpecific.VP9.numberOfSpatialLayers);
+ RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
+ kMaxSimulcastStreams);
+ for (size_t i = 0; i < config.spatial_layers.size(); ++i)
+ video_codec.spatialLayers[i] = config.spatial_layers[i];
+ }
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
RTC_DCHECK_GT(streams[i].width, 0u);
@@ -382,8 +400,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
RTC_DCHECK_GE(streams[i].max_qp, 0);
- sim_stream->width = static_cast<unsigned short>(streams[i].width);
- sim_stream->height = static_cast<unsigned short>(streams[i].height);
+ sim_stream->width = static_cast<uint16_t>(streams[i].width);
+ sim_stream->height = static_cast<uint16_t>(streams[i].height);
sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
@@ -392,12 +410,12 @@ bool VideoSendStream::ReconfigureVideoEncoder(
streams[i].temporal_layer_thresholds_bps.size() + 1);
video_codec.width = std::max(video_codec.width,
- static_cast<unsigned short>(streams[i].width));
+ static_cast<uint16_t>(streams[i].width));
video_codec.height = std::max(
- video_codec.height, static_cast<unsigned short>(streams[i].height));
+ video_codec.height, static_cast<uint16_t>(streams[i].height));
video_codec.minBitrate =
- std::min(video_codec.minBitrate,
- static_cast<unsigned int>(streams[i].min_bitrate_bps / 1000));
+ std::min(static_cast<uint16_t>(video_codec.minBitrate),
+ static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
video_codec.qpMax = std::max(video_codec.qpMax,
static_cast<unsigned int>(streams[i].max_qp));
@@ -419,6 +437,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]);
}
+ stats_proxy_.SetContentType(config.content_type);
+
RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
vie_encoder_->SetMinTransmitBitrate(config.min_transmit_bitrate_bps / 1000);
@@ -480,7 +500,7 @@ std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const {
std::map<uint32_t, RtpState> rtp_states;
for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
uint32_t ssrc = config_.rtp.ssrcs[i];
- rtp_states[ssrc] = vie_channel_->GetRtpStateForSsrc( ssrc);
+ rtp_states[ssrc] = vie_channel_->GetRtpStateForSsrc(ssrc);
}
for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
@@ -496,7 +516,7 @@ void VideoSendStream::SignalNetworkState(NetworkState state) {
// When it goes down, disable RTCP afterwards. This ensures that any packets
// sent due to the network state changed will not be dropped.
if (state == kNetworkUp)
- vie_channel_->SetRTCPMode(RtcpMode::kCompound);
+ vie_channel_->SetRTCPMode(config_.rtp.rtcp_mode);
vie_encoder_->SetNetworkTransmissionState(state == kNetworkUp);
if (state == kNetworkDown)
vie_channel_->SetRTCPMode(RtcpMode::kOff);
@@ -517,7 +537,12 @@ int64_t VideoSendStream::GetRtt() const {
return -1;
}
+int VideoSendStream::GetPaddingNeededBps() const {
+ return vie_encoder_->GetPaddingNeededBps();
+}
+
bool VideoSendStream::SetSendCodec(VideoCodec video_codec) {
+  static const int kEncoderMinBitrate = 30;  // In kbps.
if (video_codec.maxBitrate == 0) {
// Unset max bitrate -> cap to one bit per pixel.
video_codec.maxBitrate =
@@ -525,10 +550,10 @@ bool VideoSendStream::SetSendCodec(VideoCodec video_codec) {
1000;
}
- if (video_codec.minBitrate < kViEMinCodecBitrate)
- video_codec.minBitrate = kViEMinCodecBitrate;
- if (video_codec.maxBitrate < kViEMinCodecBitrate)
- video_codec.maxBitrate = kViEMinCodecBitrate;
+ if (video_codec.minBitrate < kEncoderMinBitrate)
+ video_codec.minBitrate = kEncoderMinBitrate;
+ if (video_codec.maxBitrate < kEncoderMinBitrate)
+ video_codec.maxBitrate = kEncoderMinBitrate;
// Stop the media flow while reconfiguring.
vie_encoder_->Pause();
@@ -547,14 +572,8 @@ bool VideoSendStream::SetSendCodec(VideoCodec video_codec) {
// to send on all SSRCs at once etc.)
std::vector<uint32_t> used_ssrcs = config_.rtp.ssrcs;
used_ssrcs.resize(static_cast<size_t>(video_codec.numberOfSimulcastStreams));
-
- // Update used SSRCs.
vie_encoder_->SetSsrcs(used_ssrcs);
- // Update the protection mode, we might be switching NACK/FEC.
- vie_encoder_->UpdateProtectionMethod(vie_encoder_->nack_enabled(),
- vie_channel_->IsSendingFecEnabled());
-
// Restart the media flow
vie_encoder_->Restart();
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index 88c1611915..64b7fceaf3 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -17,7 +17,7 @@
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/send_statistics_proxy.h"
@@ -27,6 +27,7 @@
namespace webrtc {
+class BitrateAllocator;
class CallStats;
class CongestionController;
class EncoderStateFeedback;
@@ -43,6 +44,7 @@ class VideoSendStream : public webrtc::VideoSendStream,
ProcessThread* module_process_thread,
CallStats* call_stats,
CongestionController* congestion_controller,
+ BitrateAllocator* bitrate_allocator,
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config,
const std::map<uint32_t, RtpState>& suspended_ssrcs);
@@ -68,10 +70,13 @@ class VideoSendStream : public webrtc::VideoSendStream,
RtpStateMap GetRtpStates() const;
int64_t GetRtt() const;
+ int GetPaddingNeededBps() const;
private:
bool SetSendCodec(VideoCodec video_codec);
void ConfigureSsrcs();
+
+ SendStatisticsProxy stats_proxy_;
TransportAdapter transport_adapter_;
EncodedFrameCallbackAdapter encoded_frame_proxy_;
const VideoSendStream::Config config_;
@@ -91,8 +96,6 @@ class VideoSendStream : public webrtc::VideoSendStream,
// start bitrate initially, instead of the one reported by VideoEngine (which
// defaults to too high).
bool use_config_bitrate_;
-
- SendStatisticsProxy stats_proxy_;
};
} // namespace internal
} // namespace webrtc
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index 59011a6162..4cf92748a8 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -15,22 +15,22 @@
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
#include "webrtc/frame_callback.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/configurable_frame_size_encoder.h"
#include "webrtc/test/fake_texture_frame.h"
@@ -58,6 +58,9 @@ class VideoSendStreamTest : public test::CallTest {
void TestNackRetransmission(uint32_t retransmit_ssrc,
uint8_t retransmit_payload_type);
void TestPacketFragmentationSize(VideoFormat format, bool with_fec);
+
+ void TestVp9NonFlexMode(uint8_t num_temporal_layers,
+ uint8_t num_spatial_layers);
};
TEST_F(VideoSendStreamTest, CanStartStartedStream) {
@@ -65,10 +68,10 @@ TEST_F(VideoSendStreamTest, CanStartStartedStream) {
CreateSenderCall(call_config);
test::NullTransport transport;
- CreateSendConfig(1, &transport);
- CreateStreams();
- send_stream_->Start();
- send_stream_->Start();
+ CreateSendConfig(1, 0, &transport);
+ CreateVideoStreams();
+ video_send_stream_->Start();
+ video_send_stream_->Start();
DestroyStreams();
}
@@ -77,10 +80,10 @@ TEST_F(VideoSendStreamTest, CanStopStoppedStream) {
CreateSenderCall(call_config);
test::NullTransport transport;
- CreateSendConfig(1, &transport);
- CreateStreams();
- send_stream_->Stop();
- send_stream_->Stop();
+ CreateSendConfig(1, 0, &transport);
+ CreateVideoStreams();
+ video_send_stream_->Stop();
+ video_send_stream_->Stop();
DestroyStreams();
}
@@ -99,7 +102,7 @@ TEST_F(VideoSendStreamTest, SupportsCName) {
while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
if (packet_type == RTCPUtility::RTCPPacketTypes::kSdesChunk) {
EXPECT_EQ(parser.Packet().CName.CName, kCName);
- observation_complete_->Set();
+ observation_complete_.Set();
}
packet_type = parser.Iterate();
@@ -108,28 +111,27 @@ TEST_F(VideoSendStreamTest, SupportsCName) {
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.c_name = kCName;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for RTCP with CNAME.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP with CNAME.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
- static const uint8_t kAbsSendTimeExtensionId = 13;
class AbsoluteSendTimeObserver : public test::SendTest {
public:
AbsoluteSendTimeObserver() : SendTest(kDefaultTimeoutMs) {
EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsSendTimeExtensionId));
+ kRtpExtensionAbsoluteSendTime, test::kAbsSendTimeExtensionId));
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
@@ -140,26 +142,26 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
EXPECT_TRUE(header.extension.hasAbsoluteSendTime);
EXPECT_EQ(header.extension.transmissionTimeOffset, 0);
EXPECT_GT(header.extension.absoluteSendTime, 0u);
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.extensions.clear();
- send_config->rtp.extensions.push_back(
- RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
+ send_config->rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for single RTP packet.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
@@ -182,14 +184,15 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
EXPECT_FALSE(header.extension.hasAbsoluteSendTime);
EXPECT_GT(header.extension.transmissionTimeOffset, 0);
EXPECT_EQ(header.extension.absoluteSendTime, 0u);
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = &encoder_;
send_config->rtp.extensions.clear();
send_config->rtp.extensions.push_back(
@@ -197,14 +200,13 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for a single RTP packet.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a single RTP packet.";
}
test::DelayedEncoder encoder_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
@@ -226,14 +228,15 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
EXPECT_FALSE(header.extension.hasTransmissionTimeOffset);
EXPECT_FALSE(header.extension.hasAbsoluteSendTime);
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = &encoder_;
send_config->rtp.extensions.clear();
send_config->rtp.extensions.push_back(
@@ -241,14 +244,13 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for a single RTP packet.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a single RTP packet.";
}
test::FakeEncoder encoder_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
class FakeReceiveStatistics : public NullReceiveStatistics {
@@ -307,83 +309,127 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
StatisticianMap stats_map_;
};
-TEST_F(VideoSendStreamTest, SupportsFec) {
- class FecObserver : public test::SendTest {
- public:
- FecObserver()
- : SendTest(kDefaultTimeoutMs),
- send_count_(0),
- received_media_(false),
- received_fec_(false) {
- }
+class FecObserver : public test::SendTest {
+ public:
+ explicit FecObserver(bool header_extensions_enabled)
+ : SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
+ send_count_(0),
+ received_media_(false),
+ received_fec_(false),
+ header_extensions_enabled_(header_extensions_enabled) {}
- private:
- Action OnSendRtp(const uint8_t* packet, size_t length) override {
- RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, length, &header));
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
- // Send lossy receive reports to trigger FEC enabling.
- if (send_count_++ % 2 != 0) {
- // Receive statistics reporting having lost 50% of the packets.
- FakeReceiveStatistics lossy_receive_stats(
- kSendSsrcs[0], header.sequenceNumber, send_count_ / 2, 127);
- RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
- &lossy_receive_stats, nullptr,
- transport_adapter_.get());
+ // Send lossy receive reports to trigger FEC enabling.
+ if (send_count_++ % 2 != 0) {
+ // Receive statistics reporting having lost 50% of the packets.
+ FakeReceiveStatistics lossy_receive_stats(
+ VideoSendStreamTest::kVideoSendSsrcs[0], header.sequenceNumber,
+ send_count_ / 2, 127);
+ RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
+ &lossy_receive_stats, nullptr,
+ transport_adapter_.get());
- rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
+ rtcp_sender.SetRemoteSSRC(VideoSendStreamTest::kVideoSendSsrcs[0]);
- RTCPSender::FeedbackState feedback_state;
+ RTCPSender::FeedbackState feedback_state;
- EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
- }
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+ }
- int encapsulated_payload_type = -1;
- if (header.payloadType == kRedPayloadType) {
- encapsulated_payload_type =
- static_cast<int>(packet[header.headerLength]);
- if (encapsulated_payload_type != kFakeSendPayloadType)
- EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
+ int encapsulated_payload_type = -1;
+ if (header.payloadType == VideoSendStreamTest::kRedPayloadType) {
+ encapsulated_payload_type = static_cast<int>(packet[header.headerLength]);
+ if (encapsulated_payload_type !=
+ VideoSendStreamTest::kFakeVideoSendPayloadType)
+ EXPECT_EQ(VideoSendStreamTest::kUlpfecPayloadType,
+ encapsulated_payload_type);
+ } else {
+ EXPECT_EQ(VideoSendStreamTest::kFakeVideoSendPayloadType,
+ header.payloadType);
+ }
+
+ if (header_extensions_enabled_) {
+ EXPECT_TRUE(header.extension.hasAbsoluteSendTime);
+ uint32_t kHalf24BitsSpace = 0xFFFFFF / 2;
+ if (header.extension.absoluteSendTime <= kHalf24BitsSpace &&
+ prev_header_.extension.absoluteSendTime > kHalf24BitsSpace) {
+ // 24 bits wrap.
+ EXPECT_GT(prev_header_.extension.absoluteSendTime,
+ header.extension.absoluteSendTime);
} else {
- EXPECT_EQ(kFakeSendPayloadType, header.payloadType);
+ EXPECT_GE(header.extension.absoluteSendTime,
+ prev_header_.extension.absoluteSendTime);
}
+ EXPECT_TRUE(header.extension.hasTransportSequenceNumber);
+ uint16_t seq_num_diff = header.extension.transportSequenceNumber -
+ prev_header_.extension.transportSequenceNumber;
+ EXPECT_EQ(1, seq_num_diff);
+ }
- if (encapsulated_payload_type != -1) {
- if (encapsulated_payload_type == kUlpfecPayloadType) {
- received_fec_ = true;
- } else {
- received_media_ = true;
- }
+ if (encapsulated_payload_type != -1) {
+ if (encapsulated_payload_type ==
+ VideoSendStreamTest::kUlpfecPayloadType) {
+ received_fec_ = true;
+ } else {
+ received_media_ = true;
}
+ }
- if (received_media_ && received_fec_)
- observation_complete_->Set();
+ if (received_media_ && received_fec_ && send_count_ > 100)
+ observation_complete_.Set();
- return SEND_PACKET;
- }
+ prev_header_ = header;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
- transport_adapter_.reset(
- new internal::TransportAdapter(send_config->send_transport));
- transport_adapter_->Enable();
- send_config->rtp.fec.red_payload_type = kRedPayloadType;
- send_config->rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
- }
+ return SEND_PACKET;
+ }
- void PerformTest() override {
- EXPECT_TRUE(Wait()) << "Timed out waiting for FEC and media packets.";
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ transport_adapter_.reset(
+ new internal::TransportAdapter(send_config->send_transport));
+ transport_adapter_->Enable();
+ send_config->rtp.fec.red_payload_type =
+ VideoSendStreamTest::kRedPayloadType;
+ send_config->rtp.fec.ulpfec_payload_type =
+ VideoSendStreamTest::kUlpfecPayloadType;
+ if (header_extensions_enabled_) {
+ send_config->rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber,
+ test::kTransportSequenceNumberExtensionId));
}
+ }
- rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
- int send_count_;
- bool received_media_;
- bool received_fec_;
- } test;
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out waiting for FEC and media packets.";
+ }
+
+ rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
+ int send_count_;
+ bool received_media_;
+ bool received_fec_;
+ bool header_extensions_enabled_;
+ RTPHeader prev_header_;
+};
+
+TEST_F(VideoSendStreamTest, SupportsFecWithExtensions) {
+ FecObserver test(true);
+
+ RunBaseTest(&test);
+}
+
+TEST_F(VideoSendStreamTest, SupportsFecWithoutExtensions) {
+ FecObserver test(false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
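// Editorial note: the absolute send time checks in FecObserver above rely on
// the extension being a 24-bit value, so a small value arriving right after a
// large one indicates a wrap rather than reordering. A minimal self-contained
// sketch of that comparison (the function name is illustrative, not WebRTC
// API):
#include <cstdint>

bool AbsSendTimeIncreased(uint32_t prev, uint32_t now) {
  const uint32_t kHalf24BitsSpace = 0xFFFFFF / 2;
  if (now <= kHalf24BitsSpace && prev > kHalf24BitsSpace)
    return true;  // The 24-bit counter wrapped; values still move forward.
  return now >= prev;
}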
void VideoSendStreamTest::TestNackRetransmission(
@@ -414,7 +460,7 @@ void VideoSendStreamTest::TestNackRetransmission(
nullptr, transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
@@ -426,8 +472,8 @@ void VideoSendStreamTest::TestNackRetransmission(
uint16_t sequence_number = header.sequenceNumber;
if (header.ssrc == retransmit_ssrc_ &&
- retransmit_ssrc_ != kSendSsrcs[0]) {
- // Not kSendSsrcs[0], assume correct RTX packet. Extract sequence
+ retransmit_ssrc_ != kVideoSendSsrcs[0]) {
+ // Not kVideoSendSsrcs[0], assume correct RTX packet. Extract sequence
// number.
const uint8_t* rtx_header = packet + header.headerLength;
sequence_number = (rtx_header[0] << 8) + rtx_header[1];
@@ -436,27 +482,27 @@ void VideoSendStreamTest::TestNackRetransmission(
if (sequence_number == nacked_sequence_number_) {
EXPECT_EQ(retransmit_ssrc_, header.ssrc);
EXPECT_EQ(retransmit_payload_type_, header.payloadType);
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->rtp.rtx.payload_type = retransmit_payload_type_;
- if (retransmit_ssrc_ != kSendSsrcs[0])
+ if (retransmit_ssrc_ != kVideoSendSsrcs[0])
send_config->rtp.rtx.ssrcs.push_back(retransmit_ssrc_);
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for NACK retransmission.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for NACK retransmission.";
}
rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
@@ -466,12 +512,12 @@ void VideoSendStreamTest::TestNackRetransmission(
int nacked_sequence_number_;
} test(retransmit_ssrc, retransmit_payload_type);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
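// Editorial note: the RTX path exercised by TestNackRetransmission above
// follows RFC 4588, where a retransmission carries the original sequence
// number (OSN) in the first two bytes of the RTX payload. A minimal sketch of
// that extraction (the helper name is illustrative):
#include <cstddef>
#include <cstdint>

uint16_t ExtractRtxOriginalSequenceNumber(const uint8_t* packet,
                                          size_t header_length) {
  const uint8_t* rtx_payload = packet + header_length;
  return static_cast<uint16_t>((rtx_payload[0] << 8) | rtx_payload[1]);
}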
TEST_F(VideoSendStreamTest, RetransmitsNack) {
// Normal NACKs should use the send SSRC.
- TestNackRetransmission(kSendSsrcs[0], kFakeSendPayloadType);
+ TestNackRetransmission(kVideoSendSsrcs[0], kFakeVideoSendPayloadType);
}
TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) {
@@ -572,7 +618,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
accumulated_payload_ = 0;
if (current_size_rtp_ == stop_size_) {
        // Done! (Don't increase the size again; more packets may still
        // arrive at stop_size.)
- observation_complete_->Set();
+ observation_complete_.Set();
} else {
// Increase next expected frame size. If testing with FEC, make sure
// a FEC packet has been received for this frame size before
@@ -596,13 +642,13 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
if (packet_count_++ % 2 != 0) {
// Receive statistics reporting having lost 50% of the packets.
FakeReceiveStatistics lossy_receive_stats(
- kSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127);
+ kVideoSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127);
RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
&lossy_receive_stats, nullptr,
transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
@@ -627,9 +673,10 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
@@ -651,8 +698,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while observing incoming RTP packets.";
+ EXPECT_TRUE(Wait()) << "Timed out while observing incoming RTP packets.";
}
rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
@@ -677,7 +723,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
FrameFragmentationTest test(
kMaxPacketSize, start, stop, format == kGeneric, with_fec);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
// TODO(sprang): Is there any way of speeding up these tests?
@@ -752,7 +798,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
VideoSendStream::Stats stats = stream_->GetStats();
if (stats.suspended == false) {
// Stats flipped to false. Test is complete.
- observation_complete_->Set();
+ observation_complete_.Set();
}
SendRtcpFeedback(0); // REMB is only sent if value is > 0.
}
@@ -782,15 +828,16 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
high_remb_bps_ = value;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
@@ -806,8 +853,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out during suspend-below-min-bitrate test.";
+ EXPECT_TRUE(Wait()) << "Timed out during suspend-below-min-bitrate test.";
}
enum TestState {
@@ -819,13 +865,13 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
virtual void SendRtcpFeedback(int remb_value)
EXCLUSIVE_LOCKS_REQUIRED(crit_) {
- FakeReceiveStatistics receive_stats(
- kSendSsrcs[0], last_sequence_number_, rtp_count_, 0);
+ FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0],
+ last_sequence_number_, rtp_count_, 0);
RTCPSender rtcp_sender(false, clock_, &receive_stats, nullptr,
transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
if (remb_value > 0) {
rtcp_sender.SetREMBStatus(true);
rtcp_sender.SetREMBData(remb_value, std::vector<uint32_t>());
@@ -847,7 +893,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
int high_remb_bps_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
@@ -874,14 +920,14 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
if (last_packet_time_ms_ > 0 &&
clock_->TimeInMilliseconds() - last_packet_time_ms_ >
kVideoMutedThresholdMs)
- observation_complete_->Set();
+ observation_complete_.Set();
// Receive statistics reporting having lost 50% of the packets.
- FakeReceiveStatistics receive_stats(kSendSsrcs[0], 1, 1, 0);
+ FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0], 1, 1, 0);
RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(), &receive_stats,
nullptr, transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
@@ -889,15 +935,16 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
return SEND_PACKET;
}
- void OnTransportsCreated(
- test::PacketTransport* send_transport,
- test::PacketTransport* receive_transport) override {
- transport_adapter_.reset(
- new internal::TransportAdapter(receive_transport));
+ test::PacketTransport* CreateReceiveTransport() override {
+ test::PacketTransport* transport = new test::PacketTransport(
+ nullptr, this, test::PacketTransport::kReceiver,
+ FakeNetworkPipe::Config());
+ transport_adapter_.reset(new internal::TransportAdapter(transport));
transport_adapter_->Enable();
+ return transport;
}
- size_t GetNumStreams() const override { return 3; }
+ size_t GetNumVideoStreams() const override { return 3; }
virtual void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) {
@@ -906,7 +953,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for RTP packets to stop being sent.";
}
@@ -917,7 +964,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
test::FrameGeneratorCapturer* capturer_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
// This test first observes "high" bitrate use at which point it sends a REMB to
@@ -966,14 +1013,14 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
bitrate_capped_ = true;
} else if (bitrate_capped_ &&
total_bitrate_bps < kRembRespectedBitrateBps) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
// Packets don't have to be delivered since the test is the receiver.
return DROP_PACKET;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
@@ -984,9 +1031,10 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
feedback_transport_.reset(
new internal::TransportAdapter(send_config->send_transport));
feedback_transport_->Enable();
@@ -994,7 +1042,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timeout while waiting for low bitrate stats after REMB.";
}
@@ -1004,7 +1052,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
bool bitrate_capped_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
@@ -1039,24 +1087,24 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
CreateSenderCall(Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
Call::Config::BitrateConfig bitrate_config;
bitrate_config.start_bitrate_bps =
- 2 * encoder_config_.streams[0].max_bitrate_bps;
+ 2 * video_encoder_config_.streams[0].max_bitrate_bps;
sender_call_->SetBitrateConfig(bitrate_config);
StartBitrateObserver encoder;
- send_config_.encoder_settings.encoder = &encoder;
+ video_send_config_.encoder_settings.encoder = &encoder;
- CreateStreams();
+ CreateVideoStreams();
- EXPECT_EQ(encoder_config_.streams[0].max_bitrate_bps / 1000,
+ EXPECT_EQ(video_encoder_config_.streams[0].max_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());
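  // Editorial note: the expectation above encodes the capping rule this test
  // depends on: the start bitrate handed to the encoder is clamped to the
  // stream's configured maximum, i.e. effectively
  //   start_bitrate = std::min(requested_start_bitrate, stream_max_bitrate);
  // (illustrative restatement, not actual WebRTC code). Requesting twice the
  // max therefore yields the max until the stream is reconfigured with a
  // higher maximum below.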
- encoder_config_.streams[0].max_bitrate_bps =
+ video_encoder_config_.streams[0].max_bitrate_bps =
2 * bitrate_config.start_bitrate_bps;
- send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
// New bitrate should be reconfigured above the previous max. As there's no
// network connection this shouldn't be flaky, as no bitrate should've been
@@ -1070,16 +1118,16 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
class FrameObserver : public I420FrameCallback {
public:
- FrameObserver() : output_frame_event_(EventWrapper::Create()) {}
+ FrameObserver() : output_frame_event_(false, false) {}
void FrameCallback(VideoFrame* video_frame) override {
output_frames_.push_back(*video_frame);
- output_frame_event_->Set();
+ output_frame_event_.Set();
}
void WaitOutputFrame() {
- const unsigned long kWaitFrameTimeoutMs = 3000;
- EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(kWaitFrameTimeoutMs))
+ const int kWaitFrameTimeoutMs = 3000;
+ EXPECT_TRUE(output_frame_event_.Wait(kWaitFrameTimeoutMs))
<< "Timeout while waiting for output frames.";
}
@@ -1092,46 +1140,46 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
std::vector<VideoFrame> output_frames_;
// Indicate an output frame has arrived.
- rtc::scoped_ptr<EventWrapper> output_frame_event_;
+ rtc::Event output_frame_event_;
};
// Initialize send stream.
CreateSenderCall(Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
FrameObserver observer;
- send_config_.pre_encode_callback = &observer;
- CreateStreams();
+ video_send_config_.pre_encode_callback = &observer;
+ CreateVideoStreams();
  // Prepare five input frames. Send ordinary VideoFrames and texture frames
  // alternately.
std::vector<VideoFrame> input_frames;
- int width = static_cast<int>(encoder_config_.streams[0].width);
- int height = static_cast<int>(encoder_config_.streams[0].height);
+ int width = static_cast<int>(video_encoder_config_.streams[0].width);
+ int height = static_cast<int>(video_encoder_config_.streams[0].height);
test::FakeNativeHandle* handle1 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle2 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle3 = new test::FakeNativeHandle();
- input_frames.push_back(test::CreateFakeNativeHandleFrame(
+ input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle1, width, height, 1, 1, kVideoRotation_0));
- input_frames.push_back(test::CreateFakeNativeHandleFrame(
+ input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle2, width, height, 2, 2, kVideoRotation_0));
input_frames.push_back(CreateVideoFrame(width, height, 3));
input_frames.push_back(CreateVideoFrame(width, height, 4));
- input_frames.push_back(test::CreateFakeNativeHandleFrame(
+ input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle3, width, height, 5, 5, kVideoRotation_0));
- send_stream_->Start();
+ video_send_stream_->Start();
for (size_t i = 0; i < input_frames.size(); i++) {
- send_stream_->Input()->IncomingCapturedFrame(input_frames[i]);
+ video_send_stream_->Input()->IncomingCapturedFrame(input_frames[i]);
// Do not send the next frame too fast, so the frame dropper won't drop it.
if (i < input_frames.size() - 1)
- SleepMs(1000 / encoder_config_.streams[0].max_framerate);
+ SleepMs(1000 / video_encoder_config_.streams[0].max_framerate);
    // Wait until the output frame is received before sending the next input
    // frame; otherwise the previous input frame may be replaced before it is
    // delivered.
observer.WaitOutputFrame();
}
- send_stream_->Stop();
+ video_send_stream_->Stop();
  // Verify that the input and output frames are the same. render_time_ms and
  // timestamp are not compared because the capturer sets those values.
@@ -1240,7 +1288,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
const std::vector<FrameType>* frame_types) override {
EXPECT_TRUE(IsReadyForEncode());
- observation_complete_->Set();
+ observation_complete_.Set();
return 0;
}
@@ -1273,7 +1321,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
return 0;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
// Encoder initialization should be done in stream construction before
@@ -1282,16 +1330,16 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
encoder_config_ = *encoder_config;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for Encode.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
EXPECT_EQ(0u, num_releases());
stream_->ReconfigureVideoEncoder(encoder_config_);
EXPECT_EQ(0u, num_releases());
@@ -1301,8 +1349,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
EXPECT_TRUE(IsReadyForEncode());
stream_->Start();
// Sanity check, make sure we still encode frames with this encoder.
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for Encode.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
}
rtc::CriticalSection crit_;
@@ -1314,7 +1361,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
VideoEncoderConfig encoder_config_;
} test_encoder;
- RunBaseTest(&test_encoder, FakeNetworkPipe::Config());
+ RunBaseTest(&test_encoder);
EXPECT_TRUE(test_encoder.IsReleased());
EXPECT_EQ(1u, test_encoder.num_releases());
@@ -1330,14 +1377,15 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
num_initializations_(0) {}
private:
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
encoder_config_ = *encoder_config;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
@@ -1372,14 +1420,13 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
VideoEncoderConfig encoder_config_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
static const size_t kVideoCodecConfigObserverNumberOfTemporalLayers = 4;
template <typename T>
class VideoCodecConfigObserver : public test::SendTest,
public test::FakeEncoder {
-
public:
VideoCodecConfigObserver(VideoCodecType video_codec_type,
const char* codec_name)
@@ -1392,9 +1439,10 @@ class VideoCodecConfigObserver : public test::SendTest,
}
private:
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
send_config->encoder_settings.payload_name = codec_name_;
@@ -1407,7 +1455,7 @@ class VideoCodecConfigObserver : public test::SendTest,
encoder_config_ = *encoder_config;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
@@ -1500,17 +1548,17 @@ void VideoCodecConfigObserver<VideoCodecVP9>::VerifyCodecSpecifics(
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) {
VideoCodecConfigObserver<VideoCodecVP8> test(kVideoCodecVP8, "VP8");
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) {
VideoCodecConfigObserver<VideoCodecVP9> test(kVideoCodecVP9, "VP9");
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesH264Config) {
VideoCodecConfigObserver<VideoCodecH264> test(kVideoCodecH264, "H264");
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
@@ -1522,6 +1570,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
++rtp_packets_sent_;
@@ -1530,6 +1579,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -1542,7 +1592,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
if (parser.Packet().SR.SenderOctetCount > 0 &&
parser.Packet().SR.SenderPacketCount == rtp_packets_sent_) {
EXPECT_EQ(media_bytes_sent_, parser.Packet().SR.SenderOctetCount);
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
packet_type = parser.Iterate();
@@ -1552,15 +1602,15 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for RTCP sender report.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP sender report.";
}
- size_t rtp_packets_sent_;
- size_t media_bytes_sent_;
+ rtc::CriticalSection crit_;
+ size_t rtp_packets_sent_ GUARDED_BY(&crit_);
+ size_t media_bytes_sent_ GUARDED_BY(&crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
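// Editorial note: the locking added to the test above follows the pattern the
// GUARDED_BY annotations describe: every callback takes the same critical
// section before touching the shared counters. A minimal sketch, assuming
// only the rtc::CriticalSection primitives already used in this file (the
// class name is illustrative):
class GuardedCounters {
 public:
  void OnRtpSent(size_t bytes) {
    rtc::CritScope lock(&crit_);  // The same lock guards every access.
    ++rtp_packets_sent_;
    media_bytes_sent_ += bytes;
  }

 private:
  rtc::CriticalSection crit_;
  size_t rtp_packets_sent_ GUARDED_BY(&crit_) = 0;
  size_t media_bytes_sent_ GUARDED_BY(&crit_) = 0;
};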
TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
@@ -1578,13 +1628,14 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
size_t max_payload_size) override {
EXPECT_EQ(static_cast<unsigned int>(kScreencastTargetBitrateKbps),
config->targetBitrate);
- observation_complete_->Set();
+ observation_complete_.Set();
return test::FakeEncoder::InitEncode(
config, number_of_cores, max_payload_size);
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
EXPECT_EQ(1u, encoder_config->streams.size());
EXPECT_TRUE(
@@ -1595,15 +1646,26 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for the encoder to be initialized.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
-TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
+// Disabled on LinuxAsan:
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5382
+#if defined(ADDRESS_SANITIZER) && defined(WEBRTC_LINUX)
+#define MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly \
+ DISABLED_ReconfigureBitratesSetsEncoderBitratesCorrectly
+#else
+#define MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly \
+ ReconfigureBitratesSetsEncoderBitratesCorrectly
+#endif
+
+TEST_F(VideoSendStreamTest,
+ MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly) {
  // These values are chosen to be "kind of odd" so that they cannot
  // accidentally match the default values.
static const int kMinBitrateKbps = 137;
@@ -1631,7 +1693,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
codecSettings->startBitrate);
EXPECT_EQ(static_cast<unsigned int>(kMaxBitrateKbps),
codecSettings->maxBitrate);
- observation_complete_->Set();
+ observation_complete_.Set();
} else if (num_initializations_ == 1) {
EXPECT_EQ(static_cast<unsigned int>(kLowerMaxBitrateKbps),
codecSettings->maxBitrate);
@@ -1658,9 +1720,10 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
// Set bitrates lower/higher than min/max to make sure they are properly
// capped.
@@ -1673,7 +1736,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
call_ = sender_call;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -1684,7 +1747,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
call_->SetBitrateConfig(bitrate_config);
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting encoder to be configured.";
encoder_config_.streams[0].min_bitrate_bps = 0;
encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
@@ -1706,7 +1769,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
webrtc::VideoEncoderConfig encoder_config_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, ReportsSentResolution) {
@@ -1747,36 +1810,37 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
return -1;
}
- observation_complete_->Set();
+ observation_complete_.Set();
return 0;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
EXPECT_EQ(kNumStreams, encoder_config->streams.size());
}
- size_t GetNumStreams() const override { return kNumStreams; }
+ size_t GetNumVideoStreams() const override { return kNumStreams; }
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for the encoder to send one frame.";
VideoSendStream::Stats stats = send_stream_->GetStats();
for (size_t i = 0; i < kNumStreams; ++i) {
- ASSERT_TRUE(stats.substreams.find(kSendSsrcs[i]) !=
+ ASSERT_TRUE(stats.substreams.find(kVideoSendSsrcs[i]) !=
stats.substreams.end())
- << "No stats for SSRC: " << kSendSsrcs[i]
+ << "No stats for SSRC: " << kVideoSendSsrcs[i]
<< ", stats should exist as soon as frames have been encoded.";
VideoSendStream::StreamStats ssrc_stats =
- stats.substreams[kSendSsrcs[i]];
+ stats.substreams[kVideoSendSsrcs[i]];
EXPECT_EQ(kEncodedResolution[i].width, ssrc_stats.width);
EXPECT_EQ(kEncodedResolution[i].height, ssrc_stats.height);
}
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -1785,106 +1849,392 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
VideoSendStream* send_stream_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
-class VP9HeaderObeserver : public test::SendTest {
+class Vp9HeaderObserver : public test::SendTest {
public:
- VP9HeaderObeserver()
- : SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
+ Vp9HeaderObserver()
+ : SendTest(VideoSendStreamTest::kLongTimeoutMs),
vp9_encoder_(VP9Encoder::Create()),
- vp9_settings_(VideoEncoder::GetDefaultVp9Settings()) {}
+ vp9_settings_(VideoEncoder::GetDefaultVp9Settings()),
+ packets_sent_(0),
+ frames_sent_(0) {}
- virtual void ModifyConfigsHook(
+ virtual void ModifyVideoConfigsHook(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) {}
- virtual void InspectHeader(RTPVideoHeaderVP9* vp9videoHeader) = 0;
+ virtual void InspectHeader(const RTPVideoHeaderVP9& vp9) = 0;
private:
const int kVp9PayloadType = 105;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
encoder_config->encoder_specific_settings = &vp9_settings_;
send_config->encoder_settings.encoder = vp9_encoder_.get();
send_config->encoder_settings.payload_name = "VP9";
send_config->encoder_settings.payload_type = kVp9PayloadType;
- ModifyConfigsHook(send_config, receive_configs, encoder_config);
+ ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
+ EXPECT_EQ(1u, encoder_config->streams.size());
+ encoder_config->streams[0].temporal_layer_thresholds_bps.resize(
+ vp9_settings_.numberOfTemporalLayers - 1);
+ encoder_config_ = *encoder_config;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Test timed out waiting for VP9 packet";
+ EXPECT_TRUE(Wait()) << "Test timed out waiting for VP9 packet, num frames "
+ << frames_sent_;
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
- if (header.payloadType == kVp9PayloadType) {
- RtpDepacketizerVp9 vp9depacketizer;
- RtpDepacketizer::ParsedPayload vp9payload;
- const uint8_t* vp9_packet = packet + header.headerLength;
- size_t payload_length =
- length - header.headerLength - header.paddingLength;
-
- if (payload_length > 0) {
- bool parse_vp9header_successful =
- vp9depacketizer.Parse(&vp9payload, vp9_packet, payload_length);
- bool is_vp9_codec_type =
- vp9payload.type.Video.codec == RtpVideoCodecTypes::kRtpVideoVp9;
- EXPECT_TRUE(parse_vp9header_successful);
- EXPECT_TRUE(is_vp9_codec_type);
-
- RTPVideoHeaderVP9* vp9videoHeader =
- &vp9payload.type.Video.codecHeader.VP9;
- if (parse_vp9header_successful && is_vp9_codec_type) {
- InspectHeader(vp9videoHeader);
- } else {
- observation_complete_->Set();
- }
+ EXPECT_EQ(kVp9PayloadType, header.payloadType);
+ const uint8_t* payload = packet + header.headerLength;
+ size_t payload_length = length - header.headerLength - header.paddingLength;
+
+ bool new_packet = packets_sent_ == 0 ||
+ IsNewerSequenceNumber(header.sequenceNumber,
+ last_header_.sequenceNumber);
+ if (payload_length > 0 && new_packet) {
+ RtpDepacketizer::ParsedPayload parsed;
+ RtpDepacketizerVp9 depacketizer;
+ EXPECT_TRUE(depacketizer.Parse(&parsed, payload, payload_length));
+ EXPECT_EQ(RtpVideoCodecTypes::kRtpVideoVp9, parsed.type.Video.codec);
+ // Verify common fields for all configurations.
+ VerifyCommonHeader(parsed.type.Video.codecHeader.VP9);
+ CompareConsecutiveFrames(header, parsed.type.Video);
+ // Verify configuration specific settings.
+ InspectHeader(parsed.type.Video.codecHeader.VP9);
+
+ ++packets_sent_;
+ if (header.markerBit) {
+ ++frames_sent_;
}
+ last_header_ = header;
+ last_vp9_ = parsed.type.Video.codecHeader.VP9;
}
-
return SEND_PACKET;
}
protected:
+ bool ContinuousPictureId(const RTPVideoHeaderVP9& vp9) const {
+ if (last_vp9_.picture_id > vp9.picture_id) {
+ return vp9.picture_id == 0; // Wrap.
+ } else {
+ return vp9.picture_id == last_vp9_.picture_id + 1;
+ }
+ }
+
+ void VerifySpatialIdxWithinFrame(const RTPVideoHeaderVP9& vp9) const {
+ bool new_layer = vp9.spatial_idx != last_vp9_.spatial_idx;
+ EXPECT_EQ(new_layer, vp9.beginning_of_frame);
+ EXPECT_EQ(new_layer, last_vp9_.end_of_frame);
+ EXPECT_EQ(new_layer ? last_vp9_.spatial_idx + 1 : last_vp9_.spatial_idx,
+ vp9.spatial_idx);
+ }
+
+ void VerifyFixedTemporalLayerStructure(const RTPVideoHeaderVP9& vp9,
+ uint8_t num_layers) const {
+ switch (num_layers) {
+ case 0:
+ VerifyTemporalLayerStructure0(vp9);
+ break;
+ case 1:
+ VerifyTemporalLayerStructure1(vp9);
+ break;
+ case 2:
+ VerifyTemporalLayerStructure2(vp9);
+ break;
+ case 3:
+ VerifyTemporalLayerStructure3(vp9);
+ break;
+ default:
+ RTC_NOTREACHED();
+ }
+ }
+
+ void VerifyTemporalLayerStructure0(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_EQ(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_EQ(kNoTemporalIdx, vp9.temporal_idx); // no tid
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ }
+
+ void VerifyTemporalLayerStructure1(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_EQ(0, vp9.temporal_idx); // 0,0,0,...
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ }
+
+ void VerifyTemporalLayerStructure2(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_GE(vp9.temporal_idx, 0); // 0,1,0,1,... (tid reset on I-frames).
+ EXPECT_LE(vp9.temporal_idx, 1);
+ EXPECT_EQ(vp9.temporal_idx > 0, vp9.temporal_up_switch);
+ if (IsNewPictureId(vp9)) {
+ uint8_t expected_tid =
+ (!vp9.inter_pic_predicted || last_vp9_.temporal_idx == 1) ? 0 : 1;
+ EXPECT_EQ(expected_tid, vp9.temporal_idx);
+ }
+ }
+
+ void VerifyTemporalLayerStructure3(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_GE(vp9.temporal_idx, 0); // 0,2,1,2,... (tid reset on I-frames).
+ EXPECT_LE(vp9.temporal_idx, 2);
+ if (IsNewPictureId(vp9) && vp9.inter_pic_predicted) {
+ EXPECT_NE(vp9.temporal_idx, last_vp9_.temporal_idx);
+ switch (vp9.temporal_idx) {
+ case 0:
+ EXPECT_EQ(2, last_vp9_.temporal_idx);
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ break;
+ case 1:
+ EXPECT_EQ(2, last_vp9_.temporal_idx);
+ EXPECT_TRUE(vp9.temporal_up_switch);
+ break;
+ case 2:
+ EXPECT_EQ(last_vp9_.temporal_idx == 0, vp9.temporal_up_switch);
+ break;
+ }
+ }
+ }
+
+ void VerifyTl0Idx(const RTPVideoHeaderVP9& vp9) const {
+ if (vp9.tl0_pic_idx == kNoTl0PicIdx)
+ return;
+
+ uint8_t expected_tl0_idx = last_vp9_.tl0_pic_idx;
+ if (vp9.temporal_idx == 0)
+ ++expected_tl0_idx;
+ EXPECT_EQ(expected_tl0_idx, vp9.tl0_pic_idx);
+ }
+
+ bool IsNewPictureId(const RTPVideoHeaderVP9& vp9) const {
+ return frames_sent_ > 0 && (vp9.picture_id != last_vp9_.picture_id);
+ }
+
+ // Flexible mode (F=1): Non-flexible mode (F=0):
+ //
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // |I|P|L|F|B|E|V|-| |I|P|L|F|B|E|V|-|
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // I: |M| PICTURE ID | I: |M| PICTURE ID |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // M: | EXTENDED PID | M: | EXTENDED PID |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // L: | T |U| S |D| L: | T |U| S |D|
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // P,F: | P_DIFF |X|N| | TL0PICIDX |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // X: |EXTENDED P_DIFF| V: | SS .. |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // V: | SS .. |
+ // +-+-+-+-+-+-+-+-+
+ void VerifyCommonHeader(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_EQ(kMaxTwoBytePictureId, vp9.max_picture_id); // M:1
+ EXPECT_NE(kNoPictureId, vp9.picture_id); // I:1
+ EXPECT_EQ(vp9_settings_.flexibleMode, vp9.flexible_mode); // F
+ EXPECT_GE(vp9.spatial_idx, 0); // S
+ EXPECT_LT(vp9.spatial_idx, vp9_settings_.numberOfSpatialLayers);
+ if (vp9.ss_data_available) // V
+ VerifySsData(vp9);
+
+ if (frames_sent_ == 0)
+ EXPECT_FALSE(vp9.inter_pic_predicted); // P
+
+ if (!vp9.inter_pic_predicted) {
+ EXPECT_TRUE(vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx);
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ }
+ }
+
+ // Scalability structure (SS).
+ //
+ // +-+-+-+-+-+-+-+-+
+ // V: | N_S |Y|G|-|-|-|
+ // +-+-+-+-+-+-+-+-+
+ // Y: | WIDTH | N_S + 1 times
+ // +-+-+-+-+-+-+-+-+
+ // | HEIGHT |
+ // +-+-+-+-+-+-+-+-+
+ // G: | N_G |
+ // +-+-+-+-+-+-+-+-+
+ // N_G: | T |U| R |-|-| N_G times
+ // +-+-+-+-+-+-+-+-+
+ // | P_DIFF | R times
+ // +-+-+-+-+-+-+-+-+
+ void VerifySsData(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_TRUE(vp9.ss_data_available); // V
+ EXPECT_EQ(vp9_settings_.numberOfSpatialLayers, // N_S + 1
+ vp9.num_spatial_layers);
+ EXPECT_TRUE(vp9.spatial_layer_resolution_present); // Y:1
+ size_t expected_width = encoder_config_.streams[0].width;
+ size_t expected_height = encoder_config_.streams[0].height;
+ for (int i = vp9.num_spatial_layers - 1; i >= 0; --i) {
+ EXPECT_EQ(expected_width, vp9.width[i]); // WIDTH
+ EXPECT_EQ(expected_height, vp9.height[i]); // HEIGHT
+ expected_width /= 2;
+ expected_height /= 2;
+ }
+ }
+
+ void CompareConsecutiveFrames(const RTPHeader& header,
+ const RTPVideoHeader& video) const {
+ const RTPVideoHeaderVP9& vp9 = video.codecHeader.VP9;
+
+ bool new_frame = packets_sent_ == 0 ||
+ IsNewerTimestamp(header.timestamp, last_header_.timestamp);
+ EXPECT_EQ(new_frame, video.isFirstPacket);
+ if (!new_frame) {
+ EXPECT_FALSE(last_header_.markerBit);
+ EXPECT_EQ(last_header_.timestamp, header.timestamp);
+ EXPECT_EQ(last_vp9_.picture_id, vp9.picture_id);
+ EXPECT_EQ(last_vp9_.temporal_idx, vp9.temporal_idx);
+ EXPECT_EQ(last_vp9_.tl0_pic_idx, vp9.tl0_pic_idx);
+ VerifySpatialIdxWithinFrame(vp9);
+ return;
+ }
+ // New frame.
+ EXPECT_TRUE(vp9.beginning_of_frame);
+
+ // Compare with last packet in previous frame.
+ if (frames_sent_ == 0)
+ return;
+ EXPECT_TRUE(last_vp9_.end_of_frame);
+ EXPECT_TRUE(last_header_.markerBit);
+ EXPECT_TRUE(ContinuousPictureId(vp9));
+ VerifyTl0Idx(vp9);
+ }
+
rtc::scoped_ptr<VP9Encoder> vp9_encoder_;
VideoCodecVP9 vp9_settings_;
+ webrtc::VideoEncoderConfig encoder_config_;
+ RTPHeader last_header_;
+ RTPVideoHeaderVP9 last_vp9_;
+ size_t packets_sent_;
+ size_t frames_sent_;
};
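// Editorial note: ContinuousPictureId above depends on the two-byte picture
// id incrementing by one per picture and wrapping to zero. A self-contained
// restatement of that rule (the function name is illustrative):
#include <cstdint>

bool PictureIdIsContinuous(uint16_t prev, uint16_t current) {
  if (prev > current)
    return current == 0;  // The picture id wrapped.
  return current == prev + 1;
}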
-TEST_F(VideoSendStreamTest, VP9NoFlexMode) {
- class NoFlexibleMode : public VP9HeaderObeserver {
- void InspectHeader(RTPVideoHeaderVP9* vp9videoHeader) override {
- EXPECT_FALSE(vp9videoHeader->flexible_mode);
- observation_complete_->Set();
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl1SLayers) {
+ const uint8_t kNumTemporalLayers = 1;
+ const uint8_t kNumSpatialLayers = 1;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl1SLayers) {
+ const uint8_t kNumTemporalLayers = 2;
+ const uint8_t kNumSpatialLayers = 1;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl1SLayers) {
+ const uint8_t kNumTemporalLayers = 3;
+ const uint8_t kNumSpatialLayers = 1;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl2SLayers) {
+ const uint8_t kNumTemporalLayers = 1;
+ const uint8_t kNumSpatialLayers = 2;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl2SLayers) {
+ const uint8_t kNumTemporalLayers = 2;
+ const uint8_t kNumSpatialLayers = 2;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl2SLayers) {
+ const uint8_t kNumTemporalLayers = 3;
+ const uint8_t kNumSpatialLayers = 2;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers,
+ uint8_t num_spatial_layers) {
+ static const size_t kNumFramesToSend = 100;
+  // Set to less than kNumFramesToSend and coprime to the length of the
+  // temporal layer structures, to verify that the temporal id is reset on
+  // key frames.
+ static const int kKeyFrameInterval = 31;
+ class NonFlexibleMode : public Vp9HeaderObserver {
+ public:
+ NonFlexibleMode(uint8_t num_temporal_layers, uint8_t num_spatial_layers)
+ : num_temporal_layers_(num_temporal_layers),
+ num_spatial_layers_(num_spatial_layers),
+ l_field_(num_temporal_layers > 1 || num_spatial_layers > 1) {}
+ void ModifyVideoConfigsHook(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ vp9_settings_.flexibleMode = false;
+ vp9_settings_.frameDroppingOn = false;
+ vp9_settings_.keyFrameInterval = kKeyFrameInterval;
+ vp9_settings_.numberOfTemporalLayers = num_temporal_layers_;
+ vp9_settings_.numberOfSpatialLayers = num_spatial_layers_;
+ }
+
+ void InspectHeader(const RTPVideoHeaderVP9& vp9) override {
+ bool ss_data_expected = !vp9.inter_pic_predicted &&
+ vp9.beginning_of_frame && vp9.spatial_idx == 0;
+ EXPECT_EQ(ss_data_expected, vp9.ss_data_available);
+ EXPECT_EQ(vp9.spatial_idx > 0, vp9.inter_layer_predicted); // D
+ EXPECT_EQ(!vp9.inter_pic_predicted,
+ frames_sent_ % kKeyFrameInterval == 0);
+
+ if (IsNewPictureId(vp9)) {
+ EXPECT_EQ(0, vp9.spatial_idx);
+ EXPECT_EQ(num_spatial_layers_ - 1, last_vp9_.spatial_idx);
+ }
+
+ VerifyFixedTemporalLayerStructure(vp9,
+ l_field_ ? num_temporal_layers_ : 0);
+
+ if (frames_sent_ > kNumFramesToSend)
+ observation_complete_.Set();
}
- } test;
+ const uint8_t num_temporal_layers_;
+ const uint8_t num_spatial_layers_;
+ const bool l_field_;
+ } test(num_temporal_layers, num_spatial_layers);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
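// Editorial note: VerifyTemporalLayerStructure3 above assumes the
// conventional three-layer temporal pattern 0,2,1,2 (repeating, reset on key
// frames). A purely illustrative sketch of the expected temporal id:
#include <cstdint>

uint8_t ExpectedTemporalIdxWithThreeLayers(int frames_since_key_frame) {
  static const uint8_t kPattern[] = {0, 2, 1, 2};
  return kPattern[frames_since_key_frame % 4];
}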
-TEST_F(VideoSendStreamTest, DISABLED_VP9FlexMode) {
- class FlexibleMode : public VP9HeaderObeserver {
- void ModifyConfigsHook(
+#if !defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5402.
+TEST_F(VideoSendStreamTest, Vp9FlexModeRefCount) {
+ class FlexibleMode : public Vp9HeaderObserver {
+ void ModifyVideoConfigsHook(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
+ encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
vp9_settings_.flexibleMode = true;
+ vp9_settings_.numberOfTemporalLayers = 1;
+ vp9_settings_.numberOfSpatialLayers = 2;
}
- void InspectHeader(RTPVideoHeaderVP9* vp9videoHeader) override {
- EXPECT_TRUE(vp9videoHeader->flexible_mode);
- observation_complete_->Set();
+ void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override {
+ EXPECT_TRUE(vp9_header.flexible_mode);
+ EXPECT_EQ(kNoTl0PicIdx, vp9_header.tl0_pic_idx);
+ if (vp9_header.inter_pic_predicted) {
+ EXPECT_GT(vp9_header.num_ref_pics, 0u);
+ observation_complete_.Set();
+ }
}
-
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
+#endif
} // namespace webrtc
diff --git a/webrtc/video/vie_channel.cc b/webrtc/video/vie_channel.cc
new file mode 100644
index 0000000000..bc23c9d467
--- /dev/null
+++ b/webrtc/video/vie_channel.cc
@@ -0,0 +1,1218 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_channel.h"
+
+#include <algorithm>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/video/call_stats.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video/report_block_stats.h"
+
+namespace webrtc {
+
+const int kMaxDecodeWaitTimeMs = 50;
+static const int kMaxTargetDelayMs = 10000;
+const int kMinSendSidePacketHistorySize = 600;
+const int kMaxPacketAgeToNack = 450;
+const int kMaxNackListSize = 250;
+
+// Helper class receiving statistics callbacks.
+class ChannelStatsObserver : public CallStatsObserver {
+ public:
+ explicit ChannelStatsObserver(ViEChannel* owner) : owner_(owner) {}
+ virtual ~ChannelStatsObserver() {}
+
+  // Implements CallStatsObserver.
+ virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
+ owner_->OnRttUpdate(avg_rtt_ms, max_rtt_ms);
+ }
+
+ private:
+ ViEChannel* const owner_;
+};
+
+class ViEChannelProtectionCallback : public VCMProtectionCallback {
+ public:
+ explicit ViEChannelProtectionCallback(ViEChannel* owner) : owner_(owner) {}
+ ~ViEChannelProtectionCallback() {}
+
+ int ProtectionRequest(
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) override {
+ return owner_->ProtectionRequest(delta_fec_params, key_fec_params,
+ sent_video_rate_bps, sent_nack_rate_bps,
+ sent_fec_rate_bps);
+ }
+ private:
+ ViEChannel* owner_;
+};
+
+ViEChannel::ViEChannel(uint32_t number_of_cores,
+ Transport* transport,
+ ProcessThread* module_process_thread,
+ RtcpIntraFrameObserver* intra_frame_observer,
+ RtcpBandwidthObserver* bandwidth_observer,
+ TransportFeedbackObserver* transport_feedback_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ size_t max_rtp_streams,
+ bool sender)
+ : number_of_cores_(number_of_cores),
+ sender_(sender),
+ module_process_thread_(module_process_thread),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ send_payload_router_(new PayloadRouter()),
+ vcm_protection_callback_(new ViEChannelProtectionCallback(this)),
+ vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
+ nullptr,
+ nullptr)),
+ vie_receiver_(vcm_, remote_bitrate_estimator, this),
+ vie_sync_(vcm_),
+ stats_observer_(new ChannelStatsObserver(this)),
+ receive_stats_callback_(nullptr),
+ incoming_video_stream_(nullptr),
+ intra_frame_observer_(intra_frame_observer),
+ rtt_stats_(rtt_stats),
+ paced_sender_(paced_sender),
+ packet_router_(packet_router),
+ bandwidth_observer_(bandwidth_observer),
+ transport_feedback_observer_(transport_feedback_observer),
+ decode_thread_(ChannelDecodeThreadFunction, this, "DecodingThread"),
+ nack_history_size_sender_(kMinSendSidePacketHistorySize),
+ max_nack_reordering_threshold_(kMaxPacketAgeToNack),
+ pre_render_callback_(NULL),
+ report_block_stats_sender_(new ReportBlockStats()),
+ time_of_first_rtt_ms_(-1),
+ rtt_sum_ms_(0),
+ last_rtt_ms_(0),
+ num_rtts_(0),
+ rtp_rtcp_modules_(
+ CreateRtpRtcpModules(!sender,
+ vie_receiver_.GetReceiveStatistics(),
+ transport,
+ intra_frame_observer_,
+ bandwidth_observer_.get(),
+ transport_feedback_observer_,
+ rtt_stats_,
+ &rtcp_packet_type_counter_observer_,
+ remote_bitrate_estimator,
+ paced_sender_,
+ packet_router_,
+ &send_bitrate_observer_,
+ &send_frame_count_observer_,
+ &send_side_delay_observer_,
+ max_rtp_streams)),
+ num_active_rtp_rtcp_modules_(1) {
+ vie_receiver_.SetRtpRtcpModule(rtp_rtcp_modules_[0]);
+ vcm_->SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
+}
+
+int32_t ViEChannel::Init() {
+ static const int kDefaultRenderDelayMs = 10;
+ module_process_thread_->RegisterModule(vie_receiver_.GetReceiveStatistics());
+
+ // RTP/RTCP initialization.
+ module_process_thread_->RegisterModule(rtp_rtcp_modules_[0]);
+
+ rtp_rtcp_modules_[0]->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);
+ if (paced_sender_) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+ }
+ packet_router_->AddRtpModule(rtp_rtcp_modules_[0]);
+ if (sender_) {
+ std::list<RtpRtcp*> send_rtp_modules(1, rtp_rtcp_modules_[0]);
+ send_payload_router_->SetSendingRtpModules(send_rtp_modules);
+ RTC_DCHECK(!send_payload_router_->active());
+ }
+ if (vcm_->RegisterReceiveCallback(this) != 0) {
+ return -1;
+ }
+ vcm_->RegisterFrameTypeCallback(this);
+ vcm_->RegisterReceiveStatisticsCallback(this);
+ vcm_->RegisterDecoderTimingCallback(this);
+ vcm_->SetRenderDelay(kDefaultRenderDelayMs);
+
+ module_process_thread_->RegisterModule(vcm_);
+ module_process_thread_->RegisterModule(&vie_sync_);
+
+ return 0;
+}
+
+ViEChannel::~ViEChannel() {
+ UpdateHistograms();
+ // Make sure we don't get more callbacks from the RTP module.
+ module_process_thread_->DeRegisterModule(
+ vie_receiver_.GetReceiveStatistics());
+ module_process_thread_->DeRegisterModule(vcm_);
+ module_process_thread_->DeRegisterModule(&vie_sync_);
+ send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
+ for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i)
+ packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ module_process_thread_->DeRegisterModule(rtp_rtcp);
+ delete rtp_rtcp;
+ }
+ if (!sender_)
+ StopDecodeThread();
+ // Release modules.
+ VideoCodingModule::Destroy(vcm_);
+}
+
+void ViEChannel::UpdateHistograms() {
+ int64_t now = Clock::GetRealTimeClock()->TimeInMilliseconds();
+
+ {
+ CriticalSectionScoped cs(crit_.get());
+ int64_t elapsed_sec = (now - time_of_first_rtt_ms_) / 1000;
+ if (time_of_first_rtt_ms_ != -1 && num_rtts_ > 0 &&
+ elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ int64_t avg_rtt_ms = (rtt_sum_ms_ + num_rtts_ / 2) / num_rtts_;
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.AverageRoundTripTimeInMilliseconds", avg_rtt_ms);
+ }
+ }
+
+ if (sender_) {
+ RtcpPacketTypeCounter rtcp_counter;
+ GetSendRtcpPacketTypeCounter(&rtcp_counter);
+ int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.NackPacketsReceivedPerMinute",
+ rtcp_counter.nack_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FirPacketsReceivedPerMinute",
+ rtcp_counter.fir_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PliPacketsReceivedPerMinute",
+ rtcp_counter.pli_packets * 60 / elapsed_sec);
+ if (rtcp_counter.nack_requests > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.UniqueNackRequestsReceivedInPercent",
+ rtcp_counter.UniqueNackRequestsInPercent());
+ }
+ int fraction_lost = report_block_stats_sender_->FractionLostInPercent();
+ if (fraction_lost != -1) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.SentPacketsLostInPercent",
+ fraction_lost);
+ }
+ }
+
+ StreamDataCounters rtp;
+ StreamDataCounters rtx;
+ GetSendStreamDataCounters(&rtp, &rtx);
+ StreamDataCounters rtp_rtx = rtp;
+ rtp_rtx.Add(rtx);
+ elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(
+ Clock::GetRealTimeClock()->TimeInMilliseconds()) /
+ 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Video.BitrateSentInKbps",
+ static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.MediaBitrateSentInKbps",
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PaddingBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RetransmittedBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
+ elapsed_sec / 1000));
+ if (rtp_rtcp_modules_[0]->RtxSendStatus() != kRtxOff) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RtxBitrateSentInKbps",
+ static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ bool fec_enabled = false;
+ uint8_t pltype_red;
+ uint8_t pltype_fec;
+ rtp_rtcp_modules_[0]->GenericFECStatus(&fec_enabled, &pltype_red,
+ &pltype_fec);
+ if (fec_enabled) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FecBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.fec.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ }
+ } else if (vie_receiver_.GetRemoteSsrc() > 0) {
+    // Get receive stats if we are receiving packets, i.e., there is a remote
+    // SSRC.
+ RtcpPacketTypeCounter rtcp_counter;
+ GetReceiveRtcpPacketTypeCounter(&rtcp_counter);
+ int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.NackPacketsSentPerMinute",
+ rtcp_counter.nack_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FirPacketsSentPerMinute",
+ rtcp_counter.fir_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PliPacketsSentPerMinute",
+ rtcp_counter.pli_packets * 60 / elapsed_sec);
+ if (rtcp_counter.nack_requests > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.UniqueNackRequestsSentInPercent",
+ rtcp_counter.UniqueNackRequestsInPercent());
+ }
+ }
+
+ StreamDataCounters rtp;
+ StreamDataCounters rtx;
+ GetReceiveStreamDataCounters(&rtp, &rtx);
+ StreamDataCounters rtp_rtx = rtp;
+ rtp_rtx.Add(rtx);
+ elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.BitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.MediaBitrateReceivedInKbps",
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PaddingBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
+ elapsed_sec / 1000));
+ uint32_t ssrc = 0;
+ if (vie_receiver_.GetRtxSsrc(&ssrc)) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RtxBitrateReceivedInKbps",
+ static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ if (vie_receiver_.IsFecEnabled()) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FecBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.fec.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ }
+ }
+}
+
+int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
+ bool new_stream) {
+ RTC_DCHECK(sender_);
+ if (video_codec.codecType == kVideoCodecRED ||
+ video_codec.codecType == kVideoCodecULPFEC) {
+ LOG_F(LS_ERROR) << "Not a valid send codec " << video_codec.codecType;
+ return -1;
+ }
+ if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
+ LOG_F(LS_ERROR) << "Incorrect config "
+ << video_codec.numberOfSimulcastStreams;
+ return -1;
+ }
+  // Update the RTP modules with the settings.
+  // Stopping and starting an RTP module triggers a new SSRC, unless an SSRC
+  // has been set explicitly.
+  // The first layer is always active, so the first module can be checked for
+  // sending status.
+ bool is_sending = rtp_rtcp_modules_[0]->Sending();
+ bool router_was_active = send_payload_router_->active();
+ send_payload_router_->set_active(false);
+ send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
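+  // The router is reactivated near the end of this method if it was active
+  // before the reconfiguration.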
+
+ std::vector<RtpRtcp*> registered_modules;
+ std::vector<RtpRtcp*> deregistered_modules;
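+  // A codec without simulcast configured still sends on one RTP module.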
+ size_t num_active_modules = video_codec.numberOfSimulcastStreams > 0
+ ? video_codec.numberOfSimulcastStreams
+ : 1;
+ size_t num_prev_active_modules;
+ {
+ // Cache which modules are active so StartSend can know which ones to start.
+ CriticalSectionScoped cs(crit_.get());
+ num_prev_active_modules = num_active_rtp_rtcp_modules_;
+ num_active_rtp_rtcp_modules_ = num_active_modules;
+ }
+ for (size_t i = 0; i < num_active_modules; ++i)
+ registered_modules.push_back(rtp_rtcp_modules_[i]);
+
+ for (size_t i = num_active_modules; i < rtp_rtcp_modules_.size(); ++i)
+ deregistered_modules.push_back(rtp_rtcp_modules_[i]);
+
+ // Disable inactive modules.
+ for (RtpRtcp* rtp_rtcp : deregistered_modules) {
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ }
+
+ // Configure active modules.
+ for (RtpRtcp* rtp_rtcp : registered_modules) {
+ rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
+ if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
+ return -1;
+ }
+ rtp_rtcp->SetSendingStatus(is_sending);
+ rtp_rtcp->SetSendingMediaStatus(is_sending);
+ }
+
+  // |RegisterRtpRtcpModules| resets the receiver's old weak pointers, so the
+  // old modules can be deleted after this step.
+ vie_receiver_.RegisterRtpRtcpModules(registered_modules);
+
+ // Update the packet and payload routers with the sending RtpRtcp modules.
+ if (sender_) {
+ std::list<RtpRtcp*> active_send_modules;
+ for (RtpRtcp* rtp_rtcp : registered_modules)
+ active_send_modules.push_back(rtp_rtcp);
+ send_payload_router_->SetSendingRtpModules(active_send_modules);
+ }
+
+ if (router_was_active)
+ send_payload_router_->set_active(true);
+
+ // Deregister previously registered modules.
+ for (size_t i = num_active_modules; i < num_prev_active_modules; ++i) {
+ module_process_thread_->DeRegisterModule(rtp_rtcp_modules_[i]);
+ packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
+ }
+ // Register new active modules.
+ for (size_t i = num_prev_active_modules; i < num_active_modules; ++i) {
+ module_process_thread_->RegisterModule(rtp_rtcp_modules_[i]);
+ packet_router_->AddRtpModule(rtp_rtcp_modules_[i]);
+ }
+ return 0;
+}
+
+int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
+ RTC_DCHECK(!sender_);
+ if (!vie_receiver_.SetReceiveCodec(video_codec)) {
+ return -1;
+ }
+
+ if (video_codec.codecType != kVideoCodecRED &&
+ video_codec.codecType != kVideoCodecULPFEC) {
+ // Register codec type with VCM, but do not register RED or ULPFEC.
+ if (vcm_->RegisterReceiveCodec(&video_codec, number_of_cores_, false) !=
+ VCM_OK) {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+void ViEChannel::RegisterExternalDecoder(const uint8_t pl_type,
+ VideoDecoder* decoder) {
+ RTC_DCHECK(!sender_);
+ vcm_->RegisterExternalDecoder(decoder, pl_type);
+}
+
+int32_t ViEChannel::ReceiveCodecStatistics(uint32_t* num_key_frames,
+ uint32_t* num_delta_frames) {
+ CriticalSectionScoped cs(crit_.get());
+ *num_key_frames = receive_frame_counts_.key_frames;
+ *num_delta_frames = receive_frame_counts_.delta_frames;
+ return 0;
+}
+
+uint32_t ViEChannel::DiscardedPackets() const {
+ return vcm_->DiscardedPackets();
+}
+
+int ViEChannel::ReceiveDelay() const {
+ return vcm_->Delay();
+}
+
+void ViEChannel::SetExpectedRenderDelay(int delay_ms) {
+ vcm_->SetRenderDelay(delay_ms);
+}
+
+void ViEChannel::SetRTCPMode(const RtcpMode rtcp_mode) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRTCPStatus(rtcp_mode);
+}
+
+void ViEChannel::SetProtectionMode(bool enable_nack,
+ bool enable_fec,
+ int payload_type_red,
+ int payload_type_fec) {
+ // Validate payload types.
+ if (enable_fec) {
+ RTC_DCHECK_GE(payload_type_red, 0);
+ RTC_DCHECK_GE(payload_type_fec, 0);
+ RTC_DCHECK_LE(payload_type_red, 127);
+ RTC_DCHECK_LE(payload_type_fec, 127);
+ } else {
+ RTC_DCHECK_EQ(payload_type_red, -1);
+ RTC_DCHECK_EQ(payload_type_fec, -1);
+ // Set to valid uint8_ts to be castable later without signed overflows.
+ payload_type_red = 0;
+ payload_type_fec = 0;
+ }
+
+ VCMVideoProtection protection_method;
+ if (enable_nack) {
+ protection_method = enable_fec ? kProtectionNackFEC : kProtectionNack;
+ } else {
+ protection_method = kProtectionNone;
+ }
+
+ vcm_->SetVideoProtection(protection_method, true);
+
+ // Set NACK.
+ ProcessNACKRequest(enable_nack);
+
+ // Set FEC.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetGenericFECStatus(enable_fec,
+ static_cast<uint8_t>(payload_type_red),
+ static_cast<uint8_t>(payload_type_fec));
+ }
+}
+
+void ViEChannel::ProcessNACKRequest(const bool enable) {
+ if (enable) {
+ // Turn on NACK.
+ if (rtp_rtcp_modules_[0]->RTCP() == RtcpMode::kOff)
+ return;
+ vie_receiver_.SetNackStatus(true, max_nack_reordering_threshold_);
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+
+ vcm_->RegisterPacketRequestCallback(this);
+ // Don't introduce errors when NACK is enabled.
+ vcm_->SetDecodeErrorMode(kNoErrors);
+ } else {
+ vcm_->RegisterPacketRequestCallback(NULL);
+ if (paced_sender_ == nullptr) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(false, 0);
+ }
+ vie_receiver_.SetNackStatus(false, max_nack_reordering_threshold_);
+ // When NACK is off, allow decoding with errors. Otherwise, the video
+ // will freeze, and will only recover with a complete key frame.
+ vcm_->SetDecodeErrorMode(kWithErrors);
+ }
+}
+
+bool ViEChannel::IsSendingFecEnabled() {
+ bool fec_enabled = false;
+ uint8_t pltype_red = 0;
+ uint8_t pltype_fec = 0;
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->GenericFECStatus(&fec_enabled, &pltype_red, &pltype_fec);
+ if (fec_enabled)
+ return true;
+ }
+ return false;
+}
+
+int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
+ if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
+ LOG(LS_ERROR) << "Invalid send buffer value.";
+ return -1;
+ }
+ if (target_delay_ms == 0) {
+ // Real-time mode.
+ nack_history_size_sender_ = kMinSendSidePacketHistorySize;
+ } else {
+ nack_history_size_sender_ = GetRequiredNackListSize(target_delay_ms);
+ // Don't allow a number lower than the default value.
+ if (nack_history_size_sender_ < kMinSendSidePacketHistorySize) {
+ nack_history_size_sender_ = kMinSendSidePacketHistorySize;
+ }
+ }
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+ return 0;
+}
+
+int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
+  // The max size of the NACK list should be large enough to accommodate the
+  // number of packets (frames) resulting from the increased delay.
+  // Rough estimate: ~40 packets per frame at 30 fps.
+ return target_delay_ms * 40 * 30 / 1000;
+}
+
+int ViEChannel::SetSendTimestampOffsetStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset);
+ }
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveTimestampOffsetStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendAbsoluteSendTimeStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionAbsoluteSendTime);
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveAbsoluteSendTimeStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendVideoRotationStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionVideoRotation);
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionVideoRotation, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveVideoRotationStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveVideoRotationStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendTransportSequenceNumber(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber);
+ }
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveTransportSequenceNumber(bool enable, int id) {
+ return vie_receiver_.SetReceiveTransportSequenceNumber(enable, id) ? 0 : -1;
+}
+
+void ViEChannel::SetRtcpXrRrtrStatus(bool enable) {
+ rtp_rtcp_modules_[0]->SetRtcpXrRrtrStatus(enable);
+}
+
+void ViEChannel::EnableTMMBR(bool enable) {
+ rtp_rtcp_modules_[0]->SetTMMBRStatus(enable);
+}
+
+int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
+ const StreamType usage,
+ const uint8_t simulcast_idx) {
+ RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[simulcast_idx];
+ if (usage == kViEStreamTypeRtx) {
+ rtp_rtcp->SetRtxSsrc(SSRC);
+ } else {
+ rtp_rtcp->SetSSRC(SSRC);
+ }
+ return 0;
+}
+
+int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
+ const uint32_t SSRC) {
+ vie_receiver_.SetRtxSsrc(SSRC);
+ return 0;
+}
+
+int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
+  RTC_DCHECK_LT(idx, rtp_rtcp_modules_.size());
+ *ssrc = rtp_rtcp_modules_[idx]->SSRC();
+ return 0;
+}
+
+uint32_t ViEChannel::GetRemoteSSRC() {
+ return vie_receiver_.GetRemoteSsrc();
+}
+
+int ViEChannel::SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRtxSendPayloadType(payload_type, associated_payload_type);
+ SetRtxSendStatus(true);
+ return 0;
+}
+
+void ViEChannel::SetRtxSendStatus(bool enable) {
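+  // Enabling RTX turns on both retransmissions and redundant (padding)
+  // payloads on the RTX stream.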
+ int rtx_settings =
+ enable ? kRtxRetransmitted | kRtxRedundantPayloads : kRtxOff;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRtxSendStatus(rtx_settings);
+}
+
+void ViEChannel::SetRtxReceivePayloadType(int payload_type,
+ int associated_payload_type) {
+ vie_receiver_.SetRtxPayloadType(payload_type, associated_payload_type);
+}
+
+void ViEChannel::SetUseRtxPayloadMappingOnRestore(bool val) {
+ vie_receiver_.SetUseRtxPayloadMappingOnRestore(val);
+}
+
+void ViEChannel::SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ if (rtp_rtcp->SetRtpStateForSsrc(ssrc, rtp_state))
+ return;
+ }
+}
+
+RtpState ViEChannel::GetRtpStateForSsrc(uint32_t ssrc) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ RtpState rtp_state;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ if (rtp_rtcp->GetRtpStateForSsrc(ssrc, &rtp_state))
+ return rtp_state;
+ }
+ LOG(LS_ERROR) << "Couldn't get RTP state for ssrc: " << ssrc;
+ return rtp_state;
+}
+
+// TODO(pbos): Set CNAME on all modules.
+int32_t ViEChannel::SetRTCPCName(const char* rtcp_cname) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ return rtp_rtcp_modules_[0]->SetCNAME(rtcp_cname);
+}
+
+int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
+ uint32_t remoteSSRC = vie_receiver_.GetRemoteSsrc();
+ return rtp_rtcp_modules_[0]->RemoteCNAME(remoteSSRC, rtcp_cname);
+}
+
+int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
+ uint32_t* cumulative_lost,
+ uint32_t* extended_max,
+ uint32_t* jitter_samples,
+ int64_t* rtt_ms) {
+ // Aggregate the report blocks associated with streams sent on this channel.
+ std::vector<RTCPReportBlock> report_blocks;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RemoteRTCPStat(&report_blocks);
+
+ if (report_blocks.empty())
+ return -1;
+
+ uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
+ std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
+ for (; it != report_blocks.end(); ++it) {
+ if (it->remoteSSRC == remote_ssrc)
+ break;
+ }
+ if (it == report_blocks.end()) {
+ // We have not received packets with an SSRC matching the report blocks. To
+ // have a chance of calculating an RTT we will try with the SSRC of the
+ // first report block received.
+ // This is very important for send-only channels where we don't know the
+ // SSRC of the other end.
+ remote_ssrc = report_blocks[0].remoteSSRC;
+ }
+
+ // TODO(asapersson): Change report_block_stats to not rely on
+ // GetSendRtcpStatistics to be called.
+ RTCPReportBlock report =
+ report_block_stats_sender_->AggregateAndStore(report_blocks);
+ *fraction_lost = report.fractionLost;
+ *cumulative_lost = report.cumulativeLost;
+ *extended_max = report.extendedHighSeqNum;
+ *jitter_samples = report.jitter;
+
+ int64_t dummy;
+ int64_t rtt = 0;
+ if (rtp_rtcp_modules_[0]->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) !=
+ 0) {
+ return -1;
+ }
+ *rtt_ms = rtt;
+ return 0;
+}
+
+void ViEChannel::RegisterSendChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RegisterRtcpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtcpStatisticsCallback(
+ callback);
+ rtp_rtcp_modules_[0]->RegisterRtcpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterRtcpPacketTypeCounterObserver(
+ RtcpPacketTypeCounterObserver* observer) {
+ rtcp_packet_type_counter_observer_.Set(observer);
+}
+
+void ViEChannel::GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ *rtp_counters = StreamDataCounters();
+ *rtx_counters = StreamDataCounters();
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ StreamDataCounters rtp_data;
+ StreamDataCounters rtx_data;
+ rtp_rtcp->GetSendStreamDataCounters(&rtp_data, &rtx_data);
+ rtp_counters->Add(rtp_data);
+ rtx_counters->Add(rtx_data);
+ }
+}
+
+void ViEChannel::GetReceiveStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ StreamStatistician* statistician = vie_receiver_.GetReceiveStatistics()->
+ GetStatistician(vie_receiver_.GetRemoteSsrc());
+ if (statistician) {
+ statistician->GetReceiveStreamDataCounters(rtp_counters);
+ }
+ uint32_t rtx_ssrc = 0;
+ if (vie_receiver_.GetRtxSsrc(&rtx_ssrc)) {
+ StreamStatistician* statistician =
+ vie_receiver_.GetReceiveStatistics()->GetStatistician(rtx_ssrc);
+ if (statistician) {
+ statistician->GetReceiveStreamDataCounters(rtx_counters);
+ }
+ }
+}
+
+void ViEChannel::RegisterSendChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::GetSendRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
+ rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
+
+ RtcpPacketTypeCounter counter;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ counter.Add(counter_map[rtp_rtcp->SSRC()]);
+ *packet_counter = counter;
+}
+
+void ViEChannel::GetReceiveRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
+ rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
+
+ RtcpPacketTypeCounter counter;
+ counter.Add(counter_map[vie_receiver_.GetRemoteSsrc()]);
+
+ *packet_counter = counter;
+}
+
+void ViEChannel::RegisterSendSideDelayObserver(
+ SendSideDelayObserver* observer) {
+ send_side_delay_observer_.Set(observer);
+}
+
+void ViEChannel::RegisterSendBitrateObserver(
+ BitrateStatisticsObserver* observer) {
+ send_bitrate_observer_.Set(observer);
+}
+
+int32_t ViEChannel::StartSend() {
+ CriticalSectionScoped cs(crit_.get());
+
+ if (rtp_rtcp_modules_[0]->Sending())
+ return -1;
+
+ for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i) {
+ RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[i];
+ rtp_rtcp->SetSendingMediaStatus(true);
+ rtp_rtcp->SetSendingStatus(true);
+ }
+ send_payload_router_->set_active(true);
+ return 0;
+}
+
+int32_t ViEChannel::StopSend() {
+ send_payload_router_->set_active(false);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetSendingMediaStatus(false);
+
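+  // If the channel was not sending, report failure; media sending was still
+  // disabled above.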
+ if (!rtp_rtcp_modules_[0]->Sending()) {
+ return -1;
+ }
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetSendingStatus(false);
+ }
+ return 0;
+}
+
+bool ViEChannel::Sending() {
+ return rtp_rtcp_modules_[0]->Sending();
+}
+
+void ViEChannel::StartReceive() {
+ if (!sender_)
+ StartDecodeThread();
+ vie_receiver_.StartReceive();
+}
+
+void ViEChannel::StopReceive() {
+ vie_receiver_.StopReceive();
+ if (!sender_) {
+ StopDecodeThread();
+ vcm_->ResetDecoder();
+ }
+}
+
+int32_t ViEChannel::ReceivedRTPPacket(const void* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ return vie_receiver_.ReceivedRTPPacket(
+ rtp_packet, rtp_packet_length, packet_time);
+}
+
+int32_t ViEChannel::ReceivedRTCPPacket(const void* rtcp_packet,
+ size_t rtcp_packet_length) {
+ return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
+}
+
+int32_t ViEChannel::SetMTU(uint16_t mtu) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetMaxTransferUnit(mtu);
+ return 0;
+}
+
+RtpRtcp* ViEChannel::rtp_rtcp() {
+ return rtp_rtcp_modules_[0];
+}
+
+rtc::scoped_refptr<PayloadRouter> ViEChannel::send_payload_router() {
+ return send_payload_router_;
+}
+
+VCMProtectionCallback* ViEChannel::vcm_protection_callback() {
+ return vcm_protection_callback_.get();
+}
+
+CallStatsObserver* ViEChannel::GetStatsObserver() {
+ return stats_observer_.get();
+}
+
+// Do not acquire the lock of |vcm_| in this function. Decode callback won't
+// necessarily be called from the decoding thread. The decoding thread may have
+// held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
+// the same lock in the path of decode callback can deadlock.
+int32_t ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT
+ CriticalSectionScoped cs(crit_.get());
+
+ if (pre_render_callback_ != NULL)
+ pre_render_callback_->FrameCallback(&video_frame);
+
+ // TODO(pbos): Remove stream id argument.
+ incoming_video_stream_->RenderFrame(0xFFFFFFFF, video_frame);
+ return 0;
+}
+
+int32_t ViEChannel::ReceivedDecodedReferenceFrame(
+ const uint64_t picture_id) {
+ return rtp_rtcp_modules_[0]->SendRTCPReferencePictureSelection(picture_id);
+}
+
+void ViEChannel::OnIncomingPayloadType(int payload_type) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnIncomingPayloadType(payload_type);
+}
+
+void ViEChannel::OnDecoderImplementationName(const char* implementation_name) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnDecoderImplementationName(implementation_name);
+}
+
+void ViEChannel::OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
+}
+
+void ViEChannel::OnDiscardedPacketsUpdated(int discarded_packets) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
+}
+
+void ViEChannel::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
+ CriticalSectionScoped cs(crit_.get());
+ receive_frame_counts_ = frame_counts;
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
+}
+
+void ViEChannel::OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) {
+ CriticalSectionScoped cs(crit_.get());
+ if (!receive_stats_callback_)
+ return;
+ receive_stats_callback_->OnDecoderTiming(
+ decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
+ jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt_ms_);
+}
+
+int32_t ViEChannel::RequestKeyFrame() {
+ return rtp_rtcp_modules_[0]->RequestKeyFrame();
+}
+
+int32_t ViEChannel::SliceLossIndicationRequest(
+ const uint64_t picture_id) {
+ return rtp_rtcp_modules_[0]->SendRTCPSliceLossIndication(
+ static_cast<uint8_t>(picture_id));
+}
+
+int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) {
+ return rtp_rtcp_modules_[0]->SendNACK(sequence_numbers, length);
+}
+
+bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
+ return static_cast<ViEChannel*>(obj)->ChannelDecodeProcess();
+}
+
+bool ViEChannel::ChannelDecodeProcess() {
+ vcm_->Decode(kMaxDecodeWaitTimeMs);
+ return true;
+}
+
+void ViEChannel::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
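+  // Forward the worst-case RTT to the VCM, which uses it on the receive side
+  // when estimating jitter buffer delay.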
+ vcm_->SetReceiveChannelParameters(max_rtt_ms);
+
+ CriticalSectionScoped cs(crit_.get());
+ if (time_of_first_rtt_ms_ == -1)
+ time_of_first_rtt_ms_ = Clock::GetRealTimeClock()->TimeInMilliseconds();
+ rtt_sum_ms_ += avg_rtt_ms;
+ last_rtt_ms_ = avg_rtt_ms;
+ ++num_rtts_;
+}
+
+int ViEChannel::ProtectionRequest(const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* video_rate_bps,
+ uint32_t* nack_rate_bps,
+ uint32_t* fec_rate_bps) {
+ *video_rate_bps = 0;
+ *nack_rate_bps = 0;
+ *fec_rate_bps = 0;
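+  // Aggregate rates across all simulcast RTP modules while updating their
+  // FEC parameters.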
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ uint32_t not_used = 0;
+ uint32_t module_video_rate = 0;
+ uint32_t module_fec_rate = 0;
+ uint32_t module_nack_rate = 0;
+ rtp_rtcp->SetFecParameters(delta_fec_params, key_fec_params);
+ rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
+ &module_nack_rate);
+ *video_rate_bps += module_video_rate;
+ *nack_rate_bps += module_nack_rate;
+ *fec_rate_bps += module_fec_rate;
+ }
+ return 0;
+}
+
+std::vector<RtpRtcp*> ViEChannel::CreateRtpRtcpModules(
+ bool receiver_only,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpIntraFrameObserver* intra_frame_callback,
+ RtcpBandwidthObserver* bandwidth_callback,
+ TransportFeedbackObserver* transport_feedback_callback,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator,
+ BitrateStatisticsObserver* send_bitrate_observer,
+ FrameCountObserver* send_frame_count_observer,
+ SendSideDelayObserver* send_side_delay_observer,
+ size_t num_modules) {
+ RTC_DCHECK_GT(num_modules, 0u);
+ RtpRtcp::Configuration configuration;
+ ReceiveStatistics* null_receive_statistics = configuration.receive_statistics;
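+  // Save the configuration's default receive-statistics pointer so it can be
+  // restored for all but the first module; only the first module gets the
+  // real receive statistics (see the loop below).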
+ configuration.audio = false;
+ configuration.receiver_only = receiver_only;
+ configuration.receive_statistics = receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.intra_frame_callback = intra_frame_callback;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer =
+ rtcp_packet_type_counter_observer;
+ configuration.paced_sender = paced_sender;
+ configuration.transport_sequence_number_allocator =
+ transport_sequence_number_allocator;
+ configuration.send_bitrate_observer = send_bitrate_observer;
+ configuration.send_frame_count_observer = send_frame_count_observer;
+ configuration.send_side_delay_observer = send_side_delay_observer;
+ configuration.bandwidth_callback = bandwidth_callback;
+ configuration.transport_feedback_callback = transport_feedback_callback;
+
+ std::vector<RtpRtcp*> modules;
+ for (size_t i = 0; i < num_modules; ++i) {
+ RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+ modules.push_back(rtp_rtcp);
+ // Receive statistics and remote bitrate estimator should only be set for
+ // the primary (first) module.
+ configuration.receive_statistics = null_receive_statistics;
+ configuration.remote_bitrate_estimator = nullptr;
+ }
+ return modules;
+}
+
+void ViEChannel::StartDecodeThread() {
+ RTC_DCHECK(!sender_);
+ if (decode_thread_.IsRunning())
+ return;
+  // Start the decode thread and run it at high priority.
+ decode_thread_.Start();
+ decode_thread_.SetPriority(rtc::kHighestPriority);
+}
+
+void ViEChannel::StopDecodeThread() {
+ vcm_->TriggerDecoderShutdown();
+
+ decode_thread_.Stop();
+}
+
+int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
+ VoEVideoSync* ve_sync_interface) {
+ return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
+ rtp_rtcp_modules_[0],
+ vie_receiver_.GetRtpReceiver());
+}
+
+int32_t ViEChannel::VoiceChannel() {
+ return vie_sync_.VoiceChannel();
+}
+
+void ViEChannel::RegisterPreRenderCallback(
+ I420FrameCallback* pre_render_callback) {
+ CriticalSectionScoped cs(crit_.get());
+ pre_render_callback_ = pre_render_callback;
+}
+
+void ViEChannel::RegisterPreDecodeImageCallback(
+ EncodedImageCallback* pre_decode_callback) {
+ vcm_->RegisterPreDecodeImageCallback(pre_decode_callback);
+}
+
+// TODO(pbos): Remove OnInitializeDecoder which is called from the RTP module,
+// any decoder resetting should be handled internally within the VCM.
+int32_t ViEChannel::OnInitializeDecoder(
+ const int8_t payload_type,
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) {
+ LOG(LS_INFO) << "OnInitializeDecoder " << static_cast<int>(payload_type)
+ << " " << payload_name;
+ vcm_->ResetDecoder();
+
+ return 0;
+}
+
+void ViEChannel::OnIncomingSSRCChanged(const uint32_t ssrc) {
+ rtp_rtcp_modules_[0]->SetRemoteSSRC(ssrc);
+}
+
+void ViEChannel::OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) {}
+
+void ViEChannel::RegisterSendFrameCountObserver(
+ FrameCountObserver* observer) {
+ send_frame_count_observer_.Set(observer);
+}
+
+void ViEChannel::RegisterReceiveStatisticsProxy(
+ ReceiveStatisticsProxy* receive_statistics_proxy) {
+ CriticalSectionScoped cs(crit_.get());
+ receive_stats_callback_ = receive_statistics_proxy;
+}
+
+void ViEChannel::SetIncomingVideoStream(
+ IncomingVideoStream* incoming_video_stream) {
+ CriticalSectionScoped cs(crit_.get());
+ incoming_video_stream_ = incoming_video_stream;
+}
+} // namespace webrtc
diff --git a/webrtc/video/vie_channel.h b/webrtc/video/vie_channel.h
new file mode 100644
index 0000000000..4ba394817f
--- /dev/null
+++ b/webrtc/video/vie_channel.h
@@ -0,0 +1,454 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_CHANNEL_H_
+#define WEBRTC_VIDEO_VIE_CHANNEL_H_
+
+#include <list>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video/vie_receiver.h"
+#include "webrtc/video/vie_sync_module.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class ChannelStatsObserver;
+class Config;
+class CriticalSectionWrapper;
+class EncodedImageCallback;
+class I420FrameCallback;
+class IncomingVideoStream;
+class PacedSender;
+class PacketRouter;
+class PayloadRouter;
+class ProcessThread;
+class ReceiveStatisticsProxy;
+class ReportBlockStats;
+class RtcpRttStats;
+class ViEChannelProtectionCallback;
+class ViERTPObserver;
+class VideoCodingModule;
+class VideoDecoder;
+class VideoRenderCallback;
+class VoEVideoSync;
+
+enum StreamType {
+ kViEStreamTypeNormal = 0, // Normal media stream
+ kViEStreamTypeRtx = 1 // Retransmission media stream
+};
+
+class ViEChannel : public VCMFrameTypeCallback,
+ public VCMReceiveCallback,
+ public VCMReceiveStatisticsCallback,
+ public VCMDecoderTimingCallback,
+ public VCMPacketRequestCallback,
+ public RtpFeedback {
+ public:
+ friend class ChannelStatsObserver;
+ friend class ViEChannelProtectionCallback;
+
+ ViEChannel(uint32_t number_of_cores,
+ Transport* transport,
+ ProcessThread* module_process_thread,
+ RtcpIntraFrameObserver* intra_frame_observer,
+ RtcpBandwidthObserver* bandwidth_observer,
+ TransportFeedbackObserver* transport_feedback_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ size_t max_rtp_streams,
+ bool sender);
+ ~ViEChannel();
+
+ int32_t Init();
+
+ // Sets the encoder to use for the channel. |new_stream| indicates the encoder
+ // type has changed and we should start a new RTP stream.
+ int32_t SetSendCodec(const VideoCodec& video_codec, bool new_stream = true);
+ int32_t SetReceiveCodec(const VideoCodec& video_codec);
+ // Registers an external decoder.
+ void RegisterExternalDecoder(const uint8_t pl_type, VideoDecoder* decoder);
+ int32_t ReceiveCodecStatistics(uint32_t* num_key_frames,
+ uint32_t* num_delta_frames);
+ uint32_t DiscardedPackets() const;
+
+ // Returns the estimated delay in milliseconds.
+ int ReceiveDelay() const;
+
+ void SetExpectedRenderDelay(int delay_ms);
+
+ void SetRTCPMode(const RtcpMode rtcp_mode);
+ void SetProtectionMode(bool enable_nack,
+ bool enable_fec,
+ int payload_type_red,
+ int payload_type_fec);
+ bool IsSendingFecEnabled();
+ int SetSenderBufferingMode(int target_delay_ms);
+ int SetSendTimestampOffsetStatus(bool enable, int id);
+ int SetReceiveTimestampOffsetStatus(bool enable, int id);
+ int SetSendAbsoluteSendTimeStatus(bool enable, int id);
+ int SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
+ int SetSendVideoRotationStatus(bool enable, int id);
+ int SetReceiveVideoRotationStatus(bool enable, int id);
+ int SetSendTransportSequenceNumber(bool enable, int id);
+ int SetReceiveTransportSequenceNumber(bool enable, int id);
+ void SetRtcpXrRrtrStatus(bool enable);
+ void EnableTMMBR(bool enable);
+
+ // Sets SSRC for outgoing stream.
+ int32_t SetSSRC(const uint32_t SSRC,
+ const StreamType usage,
+                  const uint8_t simulcast_idx);
+
+ // Gets SSRC for outgoing stream number |idx|.
+ int32_t GetLocalSSRC(uint8_t idx, unsigned int* ssrc);
+
+ // Gets SSRC for the incoming stream.
+ uint32_t GetRemoteSSRC();
+
+ int SetRtxSendPayloadType(int payload_type, int associated_payload_type);
+ void SetRtxReceivePayloadType(int payload_type, int associated_payload_type);
+ // If set to true, the RTX payload type mapping supplied in
+ // |SetRtxReceivePayloadType| will be used when restoring RTX packets. Without
+ // it, RTX packets will always be restored to the last non-RTX packet payload
+ // type received.
+ void SetUseRtxPayloadMappingOnRestore(bool val);
+
+ void SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state);
+ RtpState GetRtpStateForSsrc(uint32_t ssrc);
+
+ // Sets the CName for the outgoing stream on the channel.
+ int32_t SetRTCPCName(const char* rtcp_cname);
+
+ // Gets the CName of the incoming stream.
+ int32_t GetRemoteRTCPCName(char rtcp_cname[]);
+
+ // Returns statistics reported by the remote client in an RTCP packet.
+ // TODO(pbos): Remove this along with VideoSendStream::GetRtt().
+ int32_t GetSendRtcpStatistics(uint16_t* fraction_lost,
+ uint32_t* cumulative_lost,
+ uint32_t* extended_max,
+ uint32_t* jitter_samples,
+ int64_t* rtt_ms);
+
+ // Called on receipt of RTCP report block from remote side.
+ void RegisterSendChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback);
+
+  // Called on generation of RTCP stats.
+ void RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback);
+
+ // Gets send statistics for the rtp and rtx stream.
+ void GetSendStreamDataCounters(StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const;
+
+ // Gets received stream data counters.
+ void GetReceiveStreamDataCounters(StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const;
+
+ // Called on update of RTP statistics.
+ void RegisterSendChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback);
+
+ // Called on update of RTP statistics.
+ void RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback);
+
+ void GetSendRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const;
+
+ void GetReceiveRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const;
+
+ void RegisterSendSideDelayObserver(SendSideDelayObserver* observer);
+
+ // Called on any new send bitrate estimate.
+ void RegisterSendBitrateObserver(BitrateStatisticsObserver* observer);
+
+ // Implements RtpFeedback.
+ int32_t OnInitializeDecoder(const int8_t payload_type,
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) override;
+ void OnIncomingSSRCChanged(const uint32_t ssrc) override;
+ void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override;
+
+ int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC);
+
+ int32_t StartSend();
+ int32_t StopSend();
+ bool Sending();
+ void StartReceive();
+ void StopReceive();
+
+ int32_t ReceivedRTPPacket(const void* rtp_packet,
+ const size_t rtp_packet_length,
+ const PacketTime& packet_time);
+ int32_t ReceivedRTCPPacket(const void* rtcp_packet,
+ const size_t rtcp_packet_length);
+
+ // Sets the maximum transfer unit size for the network link, i.e. including
+ // IP, UDP and RTP headers.
+ int32_t SetMTU(uint16_t mtu);
+
+ // Gets the modules used by the channel.
+ RtpRtcp* rtp_rtcp();
+ rtc::scoped_refptr<PayloadRouter> send_payload_router();
+ VCMProtectionCallback* vcm_protection_callback();
+
+ CallStatsObserver* GetStatsObserver();
+
+ // Implements VCMReceiveCallback.
+ virtual int32_t FrameToRender(VideoFrame& video_frame); // NOLINT
+
+ // Implements VCMReceiveCallback.
+ virtual int32_t ReceivedDecodedReferenceFrame(
+ const uint64_t picture_id);
+
+ // Implements VCMReceiveCallback.
+ void OnIncomingPayloadType(int payload_type) override;
+ void OnDecoderImplementationName(const char* implementation_name) override;
+
+ // Implements VCMReceiveStatisticsCallback.
+ void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
+ void OnDiscardedPacketsUpdated(int discarded_packets) override;
+ void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
+
+ // Implements VCMDecoderTimingCallback.
+ virtual void OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms);
+
+ // Implements FrameTypeCallback.
+ virtual int32_t RequestKeyFrame();
+
+ // Implements FrameTypeCallback.
+ virtual int32_t SliceLossIndicationRequest(
+ const uint64_t picture_id);
+
+ // Implements VideoPacketRequestCallback.
+ int32_t ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) override;
+
+ int32_t SetVoiceChannel(int32_t ve_channel_id,
+ VoEVideoSync* ve_sync_interface);
+ int32_t VoiceChannel();
+
+ // New-style callbacks, used by VideoReceiveStream.
+ void RegisterPreRenderCallback(I420FrameCallback* pre_render_callback);
+ void RegisterPreDecodeImageCallback(
+ EncodedImageCallback* pre_decode_callback);
+
+ void RegisterSendFrameCountObserver(FrameCountObserver* observer);
+ void RegisterRtcpPacketTypeCounterObserver(
+ RtcpPacketTypeCounterObserver* observer);
+ void RegisterReceiveStatisticsProxy(
+ ReceiveStatisticsProxy* receive_statistics_proxy);
+ void SetIncomingVideoStream(IncomingVideoStream* incoming_video_stream);
+
+ protected:
+ static bool ChannelDecodeThreadFunction(void* obj);
+ bool ChannelDecodeProcess();
+
+ void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms);
+
+ int ProtectionRequest(const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps);
+
+ private:
+ static std::vector<RtpRtcp*> CreateRtpRtcpModules(
+ bool receiver_only,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpIntraFrameObserver* intra_frame_callback,
+ RtcpBandwidthObserver* bandwidth_callback,
+ TransportFeedbackObserver* transport_feedback_callback,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator,
+ BitrateStatisticsObserver* send_bitrate_observer,
+ FrameCountObserver* send_frame_count_observer,
+ SendSideDelayObserver* send_side_delay_observer,
+ size_t num_modules);
+
+ // Assumed to be protected.
+ void StartDecodeThread();
+ void StopDecodeThread();
+
+ void ProcessNACKRequest(const bool enable);
+ // Compute NACK list parameters for the buffering mode.
+ int GetRequiredNackListSize(int target_delay_ms);
+ void SetRtxSendStatus(bool enable);
+
+ void UpdateHistograms();
+
+  // ViEChannel exposes methods that allow its observers and callbacks to be
+  // modified after construction. Such an API style is cumbersome to implement
+  // and maintain at every level compared to setting them only at
+  // construction, so this class instantiates its children with wrappers that
+  // can be re-targeted at a later time.
+ template <class T>
+ class RegisterableCallback : public T {
+ public:
+ RegisterableCallback()
+ : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ callback_(NULL) {}
+
+ void Set(T* callback) {
+ CriticalSectionScoped cs(critsect_.get());
+ callback_ = callback;
+ }
+
+ protected:
+    // Note: this should be implemented with an RW-lock to allow simultaneous
+    // calls into the callback. However, that doesn't seem to be needed for
+    // the current type of callbacks covered by this class.
+ rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
+ T* callback_ GUARDED_BY(critsect_);
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(RegisterableCallback);
+ };
+
+ class RegisterableBitrateStatisticsObserver:
+ public RegisterableCallback<BitrateStatisticsObserver> {
+ virtual void Notify(const BitrateStatistics& total_stats,
+ const BitrateStatistics& retransmit_stats,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->Notify(total_stats, retransmit_stats, ssrc);
+ }
+ } send_bitrate_observer_;
+
+ class RegisterableFrameCountObserver
+ : public RegisterableCallback<FrameCountObserver> {
+ public:
+ virtual void FrameCountUpdated(const FrameCounts& frame_counts,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->FrameCountUpdated(frame_counts, ssrc);
+ }
+
+ } send_frame_count_observer_;
+
+ class RegisterableSendSideDelayObserver :
+ public RegisterableCallback<SendSideDelayObserver> {
+ void SendSideDelayUpdated(int avg_delay_ms,
+ int max_delay_ms,
+ uint32_t ssrc) override {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc);
+ }
+ } send_side_delay_observer_;
+
+ class RegisterableRtcpPacketTypeCounterObserver
+ : public RegisterableCallback<RtcpPacketTypeCounterObserver> {
+ public:
+ void RtcpPacketTypesCounterUpdated(
+ uint32_t ssrc,
+ const RtcpPacketTypeCounter& packet_counter) override {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
+ counter_map_[ssrc] = packet_counter;
+ }
+
+ virtual std::map<uint32_t, RtcpPacketTypeCounter> GetPacketTypeCounterMap()
+ const {
+ CriticalSectionScoped cs(critsect_.get());
+ return counter_map_;
+ }
+
+ private:
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map_
+ GUARDED_BY(critsect_);
+ } rtcp_packet_type_counter_observer_;
+
+ const uint32_t number_of_cores_;
+ const bool sender_;
+
+ ProcessThread* const module_process_thread_;
+
+ // Used for all registered callbacks except rendering.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Owned modules/classes.
+ rtc::scoped_refptr<PayloadRouter> send_payload_router_;
+ rtc::scoped_ptr<ViEChannelProtectionCallback> vcm_protection_callback_;
+
+ VideoCodingModule* const vcm_;
+ ViEReceiver vie_receiver_;
+ ViESyncModule vie_sync_;
+
+ // Helper to report call statistics.
+ rtc::scoped_ptr<ChannelStatsObserver> stats_observer_;
+
+ // Not owned.
+ ReceiveStatisticsProxy* receive_stats_callback_ GUARDED_BY(crit_);
+ FrameCounts receive_frame_counts_ GUARDED_BY(crit_);
+ IncomingVideoStream* incoming_video_stream_ GUARDED_BY(crit_);
+ RtcpIntraFrameObserver* const intra_frame_observer_;
+ RtcpRttStats* const rtt_stats_;
+ PacedSender* const paced_sender_;
+ PacketRouter* const packet_router_;
+
+ const rtc::scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+ TransportFeedbackObserver* const transport_feedback_observer_;
+
+ rtc::PlatformThread decode_thread_;
+
+ int nack_history_size_sender_;
+ int max_nack_reordering_threshold_;
+ I420FrameCallback* pre_render_callback_ GUARDED_BY(crit_);
+
+ const rtc::scoped_ptr<ReportBlockStats> report_block_stats_sender_;
+
+ int64_t time_of_first_rtt_ms_ GUARDED_BY(crit_);
+ int64_t rtt_sum_ms_ GUARDED_BY(crit_);
+ int64_t last_rtt_ms_ GUARDED_BY(crit_);
+ size_t num_rtts_ GUARDED_BY(crit_);
+
+ // RtpRtcp modules, declared last as they use other members on construction.
+ const std::vector<RtpRtcp*> rtp_rtcp_modules_;
+ size_t num_active_rtp_rtcp_modules_ GUARDED_BY(crit_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_CHANNEL_H_
diff --git a/webrtc/video/vie_codec_unittest.cc b/webrtc/video/vie_codec_unittest.cc
new file mode 100644
index 0000000000..9f648ec521
--- /dev/null
+++ b/webrtc/video/vie_codec_unittest.cc
@@ -0,0 +1,230 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+// Builds VP8 codec with 0 simulcast streams.
+void BuildVP8Codec(webrtc::VideoCodec* video_codec) {
+ video_codec->codecType = kVideoCodecVP8;
+ strncpy(video_codec->plName, "VP8", 4);
+ video_codec->plType = 100;
+ video_codec->width = 1280;
+ video_codec->height = 720;
+
+ video_codec->startBitrate = 1000; // kbps
+ video_codec->maxBitrate = 2000; // kbps
+ video_codec->minBitrate = 1000; // kbps
+ video_codec->maxFramerate = 30;
+
+ video_codec->qpMax = 50;
+ video_codec->numberOfSimulcastStreams = 0;
+ video_codec->mode = kRealtimeVideo;
+
+ // Set VP8 codec specific info.
+ video_codec->codecSpecific.VP8.pictureLossIndicationOn = true;
+ video_codec->codecSpecific.VP8.feedbackModeOn = true;
+ video_codec->codecSpecific.VP8.complexity = kComplexityNormal;
+ video_codec->codecSpecific.VP8.resilience = kResilienceOff;
+ video_codec->codecSpecific.VP8.numberOfTemporalLayers = 0;
+ video_codec->codecSpecific.VP8.denoisingOn = true;
+ video_codec->codecSpecific.VP8.errorConcealmentOn = true;
+ video_codec->codecSpecific.VP8.automaticResizeOn = true;
+ video_codec->codecSpecific.VP8.frameDroppingOn = true;
+ video_codec->codecSpecific.VP8.keyFrameInterval = 200;
+}
+
+void SetSimulcastSettings(webrtc::VideoCodec* video_codec) {
+ // Simulcast settings.
+ video_codec->numberOfSimulcastStreams = 1;
+ video_codec->simulcastStream[0].width = 320;
+ video_codec->simulcastStream[0].height = 180;
+ video_codec->simulcastStream[0].numberOfTemporalLayers = 0;
+ video_codec->simulcastStream[0].maxBitrate = 100;
+ video_codec->simulcastStream[0].targetBitrate = 100;
+ video_codec->simulcastStream[0].minBitrate = 0;
+ video_codec->simulcastStream[0].qpMax = video_codec->qpMax;
+}
+
+// This test compares two VideoCodec instances, excluding codec-specific and
+// simulcast stream fields.
+TEST(ViECodecTest, TestCompareCodecs) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
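+  // Zero-initialize the structs so every field starts from a known state.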
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+
+ EXPECT_TRUE(codec1 == codec2);
+ EXPECT_FALSE(codec1 != codec2);
+
+  // plName comparison is case insensitive.
+ strncpy(codec2.plName, "vp8", 4);
+ EXPECT_TRUE(codec1 == codec2);
+
+ codec2.codecType = kVideoCodecUnknown;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // Modify plType.
+ BuildVP8Codec(&codec2);
+ codec2.plType = 101;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // Modify height and width.
+ BuildVP8Codec(&codec2);
+ codec2.width = 640;
+ codec2.height = 480;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify framerate, default value is 30.
+ BuildVP8Codec(&codec2);
+ codec2.maxFramerate = 15;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modifying startBitrate, default value is 1000 kbps.
+ BuildVP8Codec(&codec2);
+ codec2.startBitrate = 2000;
+ EXPECT_FALSE(codec1 == codec2);
+  // maxBitrate
+  BuildVP8Codec(&codec2);
+  codec2.maxBitrate = 3000;
+  EXPECT_FALSE(codec1 == codec2);
+  // minBitrate
+  BuildVP8Codec(&codec2);
+  codec2.minBitrate = 500;
+  EXPECT_FALSE(codec1 == codec2);
+
+ // Modify qpMax.
+ BuildVP8Codec(&codec2);
+ codec2.qpMax = 100;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify mode
+ BuildVP8Codec(&codec2);
+ codec2.mode = kScreensharing;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
+// Test VP8-specific comparison.
+TEST(ViECodecTest, TestCompareVP8CodecSpecific) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+ EXPECT_TRUE(codec1 == codec2);
+
+ // pictureLossIndicationOn
+ codec2.codecSpecific.VP8.pictureLossIndicationOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // feedbackModeOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.feedbackModeOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // complexity
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.complexity = kComplexityHigh;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // resilience
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.resilience = kResilientStream;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // numberOfTemporalLayers
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.numberOfTemporalLayers = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // denoisingOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.denoisingOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // errorConcealmentOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.errorConcealmentOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // automaticResizeOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.automaticResizeOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // frameDroppingOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.frameDroppingOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // keyFrameInterval
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.keyFrameInterval = 100;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
+// This test compares simulcast stream information in VideoCodec.
+TEST(ViECodecTest, TestCompareSimulcastStreams) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+  // Set simulcast settings.
+ SetSimulcastSettings(&codec1);
+ SetSimulcastSettings(&codec2);
+ EXPECT_TRUE(codec1 == codec2);
+
+ // Modify number of streams.
+ codec2.numberOfSimulcastStreams = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // Reset the stream count.
+ codec2.numberOfSimulcastStreams = 1;
+ // Modify height and width in codec2.
+ codec2.simulcastStream[0].width = 640;
+ codec2.simulcastStream[0].height = 480;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // numberOfTemporalLayers
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].numberOfTemporalLayers = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // maxBitrate
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].maxBitrate = 1000;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // targetBitrate
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].targetBitrate = 1000;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // minBitrate
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].minBitrate = 50;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // qpMax
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].qpMax = 100;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
new file mode 100644
index 0000000000..a147b2415c
--- /dev/null
+++ b/webrtc/video/vie_encoder.cc
@@ -0,0 +1,634 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_encoder.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/call/bitrate_allocator.h"
+#include "webrtc/common_video/include/video_image.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/send_statistics_proxy.h"
+
+namespace webrtc {
+
+// Margin, as a factor of the configured buffer delay, by which the pacing
+// buffer must overflow before the encoder is paused.
+static const float kEncoderPausePacerMargin = 2.0f;
+
+// Don't stop the encoder unless the delay is above this configured value.
+static const int kMinPacingDelayMs = 200;
+
+static const float kStopPaddingThresholdMs = 2000;
+
+static const int kMinKeyFrameRequestIntervalMs = 300;
+
+std::vector<uint32_t> AllocateStreamBitrates(
+ uint32_t total_bitrate,
+ const SimulcastStream* stream_configs,
+ size_t number_of_streams) {
+ if (number_of_streams == 0) {
+    return std::vector<uint32_t>(1, total_bitrate);
+ }
+ std::vector<uint32_t> stream_bitrates(number_of_streams, 0);
+ uint32_t bitrate_remainder = total_bitrate;
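+  // Greedily grant each stream up to its max bitrate until the total budget
+  // is exhausted; any shortfall leaves the remaining streams at zero.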
+ for (size_t i = 0; i < stream_bitrates.size() && bitrate_remainder > 0; ++i) {
+ if (stream_configs[i].maxBitrate * 1000 > bitrate_remainder) {
+ stream_bitrates[i] = bitrate_remainder;
+ } else {
+ stream_bitrates[i] = stream_configs[i].maxBitrate * 1000;
+ }
+ bitrate_remainder -= stream_bitrates[i];
+ }
+ return stream_bitrates;
+}
+
+class QMVideoSettingsCallback : public VCMQMSettingsCallback {
+ public:
+ explicit QMVideoSettingsCallback(VideoProcessing* vpm);
+
+ ~QMVideoSettingsCallback();
+
+ // Update VPM with QM (quality modes: frame size & frame rate) settings.
+ int32_t SetVideoQMSettings(const uint32_t frame_rate,
+ const uint32_t width,
+ const uint32_t height);
+
+ // Update target frame rate.
+ void SetTargetFramerate(int frame_rate);
+
+ private:
+ VideoProcessing* vp_;
+};
+
+class ViEBitrateObserver : public BitrateObserver {
+ public:
+ explicit ViEBitrateObserver(ViEEncoder* owner)
+ : owner_(owner) {
+ }
+ virtual ~ViEBitrateObserver() {}
+ // Implements BitrateObserver.
+ virtual void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t rtt) {
+ owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
+ }
+ private:
+ ViEEncoder* owner_;
+};
+
+ViEEncoder::ViEEncoder(uint32_t number_of_cores,
+ ProcessThread* module_process_thread,
+ SendStatisticsProxy* stats_proxy,
+ I420FrameCallback* pre_encode_callback,
+ PacedSender* pacer,
+ BitrateAllocator* bitrate_allocator)
+ : number_of_cores_(number_of_cores),
+ vp_(VideoProcessing::Create()),
+ qm_callback_(new QMVideoSettingsCallback(vp_.get())),
+ vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
+ this,
+ qm_callback_.get())),
+ send_payload_router_(NULL),
+ data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ stats_proxy_(stats_proxy),
+ pre_encode_callback_(pre_encode_callback),
+ pacer_(pacer),
+ bitrate_allocator_(bitrate_allocator),
+ time_of_last_frame_activity_ms_(0),
+ encoder_config_(),
+ min_transmit_bitrate_kbps_(0),
+ last_observed_bitrate_bps_(0),
+ target_delay_ms_(0),
+ network_is_transmitting_(true),
+ encoder_paused_(false),
+ encoder_paused_and_dropped_frame_(false),
+ module_process_thread_(module_process_thread),
+ has_received_sli_(false),
+ picture_id_sli_(0),
+ has_received_rpsi_(false),
+ picture_id_rpsi_(0),
+ video_suspended_(false) {
+ bitrate_observer_.reset(new ViEBitrateObserver(this));
+}
+
+bool ViEEncoder::Init() {
+ vp_->EnableTemporalDecimation(true);
+
+ // Enable/disable content analysis: off by default for now.
+ vp_->EnableContentAnalysis(false);
+
+ if (vcm_->RegisterTransportCallback(this) != 0) {
+ return false;
+ }
+ if (vcm_->RegisterSendStatisticsCallback(this) != 0) {
+ return false;
+ }
+ return true;
+}
+
+void ViEEncoder::StartThreadsAndSetSharedMembers(
+ rtc::scoped_refptr<PayloadRouter> send_payload_router,
+ VCMProtectionCallback* vcm_protection_callback) {
+ RTC_DCHECK(send_payload_router_ == NULL);
+
+ send_payload_router_ = send_payload_router;
+ vcm_->RegisterProtectionCallback(vcm_protection_callback);
+ module_process_thread_->RegisterModule(vcm_.get());
+}
+
+void ViEEncoder::StopThreadsAndRemoveSharedMembers() {
+ if (bitrate_allocator_)
+ bitrate_allocator_->RemoveBitrateObserver(bitrate_observer_.get());
+ module_process_thread_->DeRegisterModule(vcm_.get());
+}
+
+ViEEncoder::~ViEEncoder() {
+}
+
+void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ network_is_transmitting_ = is_transmitting;
+ }
+}
+
+void ViEEncoder::Pause() {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_paused_ = true;
+}
+
+void ViEEncoder::Restart() {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_paused_ = false;
+}
+
+int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
+ uint8_t pl_type,
+ bool internal_source) {
+ if (vcm_->RegisterExternalEncoder(encoder, pl_type, internal_source) !=
+ VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
+ if (vcm_->RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+ // Setting target width and height for VPM.
+ if (vp_->SetTargetResolution(video_codec.width, video_codec.height,
+ video_codec.maxFramerate) != VPM_OK) {
+ return -1;
+ }
+
+  // Cache the codec before calling AddBitrateObserver, which triggers
+  // OnNetworkChanged and thus needs the configured number of simulcast
+  // streams.
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_config_ = video_codec;
+ }
+
+ // Add a bitrate observer to the allocator and update the start, max and
+ // min bitrates of the bitrate controller as needed.
+ int allocated_bitrate_bps = bitrate_allocator_->AddBitrateObserver(
+ bitrate_observer_.get(), video_codec.minBitrate * 1000,
+ video_codec.maxBitrate * 1000);
+
+ webrtc::VideoCodec modified_video_codec = video_codec;
+ modified_video_codec.startBitrate = allocated_bitrate_bps / 1000;
+
+ size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
+ if (vcm_->RegisterSendCodec(&modified_video_codec, number_of_cores_,
+ static_cast<uint32_t>(max_data_payload_length)) !=
+ VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
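+// Returns the bitrate (bps) to pad up to. Padding is only considered when
+// simulcast, video suspension, or a min-transmit bitrate requires it; it
+// decays to zero after kStopPaddingThresholdMs without frame activity and is
+// always clamped to the latest bandwidth estimate.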
+int ViEEncoder::GetPaddingNeededBps() const {
+ int64_t time_of_last_frame_activity_ms;
+ int min_transmit_bitrate_bps;
+ int bitrate_bps;
+ VideoCodec send_codec;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ bool send_padding = encoder_config_.numberOfSimulcastStreams > 1 ||
+ video_suspended_ || min_transmit_bitrate_kbps_ > 0;
+ if (!send_padding)
+ return 0;
+ time_of_last_frame_activity_ms = time_of_last_frame_activity_ms_;
+ min_transmit_bitrate_bps = 1000 * min_transmit_bitrate_kbps_;
+ bitrate_bps = last_observed_bitrate_bps_;
+ send_codec = encoder_config_;
+ }
+
+ bool video_is_suspended = vcm_->VideoSuspended();
+
+ // Find the max amount of padding we can allow ourselves to send at this
+ // point, based on which streams are currently active and what our current
+ // available bandwidth is.
+ int pad_up_to_bitrate_bps = 0;
+ if (send_codec.numberOfSimulcastStreams == 0) {
+ pad_up_to_bitrate_bps = send_codec.minBitrate * 1000;
+ } else {
+ SimulcastStream* stream_configs = send_codec.simulcastStream;
+ pad_up_to_bitrate_bps =
+ stream_configs[send_codec.numberOfSimulcastStreams - 1].minBitrate *
+ 1000;
+ for (int i = 0; i < send_codec.numberOfSimulcastStreams - 1; ++i) {
+ pad_up_to_bitrate_bps += stream_configs[i].targetBitrate * 1000;
+ }
+ }
+
+  // Disable padding when sending a single stream, video isn't suspended, and
+  // no min-transmit bitrate is set (that case is handled below).
+ if (!video_is_suspended && send_codec.numberOfSimulcastStreams <= 1)
+ pad_up_to_bitrate_bps = 0;
+
+ // The amount of padding should decay to zero if no frames are being
+ // captured/encoded unless a min-transmit bitrate is used.
+ int64_t now_ms = TickTime::MillisecondTimestamp();
+ if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
+ pad_up_to_bitrate_bps = 0;
+
+ // Pad up to min bitrate.
+ if (pad_up_to_bitrate_bps < min_transmit_bitrate_bps)
+ pad_up_to_bitrate_bps = min_transmit_bitrate_bps;
+
+ // Padding may never exceed bitrate estimate.
+ if (pad_up_to_bitrate_bps > bitrate_bps)
+ pad_up_to_bitrate_bps = bitrate_bps;
+
+ return pad_up_to_bitrate_bps;
+}
+
+bool ViEEncoder::EncoderPaused() const {
+  // Pause video if paused by the caller, while the network is down, or while
+  // the pacer queue has grown too large in buffered mode.
+ if (encoder_paused_) {
+ return true;
+ }
+ if (target_delay_ms_ > 0) {
+ // Buffered mode.
+ // TODO(pwestin): Workaround until nack is configured as a time and not
+ // number of packets.
+ return pacer_->QueueInMs() >=
+ std::max(
+ static_cast<int>(target_delay_ms_ * kEncoderPausePacerMargin),
+ kMinPacingDelayMs);
+ }
+ if (pacer_->ExpectedQueueTimeMs() > PacedSender::kMaxQueueLengthMs) {
+ // Too much data in pacer queue, drop frame.
+ return true;
+ }
+ return !network_is_transmitting_;
+}
+
+void ViEEncoder::TraceFrameDropStart() {
+ // Start trace event only on the first frame after encoder is paused.
+ if (!encoder_paused_and_dropped_frame_) {
+ TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
+ }
+ encoder_paused_and_dropped_frame_ = true;
+}
+
+void ViEEncoder::TraceFrameDropEnd() {
+ // End trace event on first frame after encoder resumes, if frame was dropped.
+ if (encoder_paused_and_dropped_frame_) {
+ TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
+ }
+ encoder_paused_and_dropped_frame_ = false;
+}
+
+void ViEEncoder::DeliverFrame(VideoFrame video_frame) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+ if (!send_payload_router_->active()) {
+ // We've paused or we have no channels attached, don't waste resources on
+ // encoding.
+ return;
+ }
+ VideoCodecType codec_type;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ if (EncoderPaused()) {
+ TraceFrameDropStart();
+ return;
+ }
+ TraceFrameDropEnd();
+ codec_type = encoder_config_.codecType;
+ }
+
+ TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
+ "Encode");
+ const VideoFrame* frame_to_send = &video_frame;
+ // TODO(wuchengli): support texture frames.
+ if (video_frame.native_handle() == NULL) {
+ // Pass frame via preprocessor.
+ frame_to_send = vp_->PreprocessFrame(video_frame);
+ if (!frame_to_send) {
+      // The preprocessor dropped the frame or failed while processing it.
+ return;
+ }
+ }
+
+  // If we have a pre-encode FrameCallback, make a deep copy of the frame so
+  // the callback can modify it without affecting |video_frame|.
+ VideoFrame copied_frame;
+ if (pre_encode_callback_) {
+ copied_frame.CopyFrame(*frame_to_send);
+ pre_encode_callback_->FrameCallback(&copied_frame);
+ frame_to_send = &copied_frame;
+ }
+
+ if (codec_type == webrtc::kVideoCodecVP8) {
+ webrtc::CodecSpecificInfo codec_specific_info;
+ codec_specific_info.codecType = webrtc::kVideoCodecVP8;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
+ has_received_rpsi_;
+ codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
+ has_received_sli_;
+ codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
+ picture_id_rpsi_;
+ codec_specific_info.codecSpecific.VP8.pictureIdSLI =
+ picture_id_sli_;
+ has_received_sli_ = false;
+ has_received_rpsi_ = false;
+ }
+
+ vcm_->AddVideoFrame(*frame_to_send, vp_->GetContentMetrics(),
+ &codec_specific_info);
+ return;
+ }
+ vcm_->AddVideoFrame(*frame_to_send);
+}
+
+int ViEEncoder::SendKeyFrame() {
+ return vcm_->IntraFrameRequest(0);
+}
+
+uint32_t ViEEncoder::LastObservedBitrateBps() const {
+ CriticalSectionScoped cs(data_cs_.get());
+ return last_observed_bitrate_bps_;
+}
+
+int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
+ if (vcm_->Bitrate(bitrate) != 0)
+ return -1;
+ return 0;
+}
+
+void ViEEncoder::SetProtectionMethod(bool nack, bool fec) {
+ // Set Video Protection for VCM.
+ VCMVideoProtection protection_mode;
+ if (fec) {
+ protection_mode =
+ nack ? webrtc::kProtectionNackFEC : kProtectionFEC;
+ } else {
+ protection_mode = nack ? kProtectionNack : kProtectionNone;
+ }
+ vcm_->SetVideoProtection(protection_mode, true);
+}
+
+void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ target_delay_ms_ = target_delay_ms;
+ }
+ if (target_delay_ms > 0) {
+ // Disable external frame-droppers.
+ vcm_->EnableFrameDropper(false);
+ vp_->EnableTemporalDecimation(false);
+ } else {
+ // Real-time mode - enable frame droppers.
+ vp_->EnableTemporalDecimation(true);
+ vcm_->EnableFrameDropper(true);
+ }
+}
+
+void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
+ if (stats_proxy_)
+ stats_proxy_->OnSetRates(bitrate_bps, framerate);
+}
+
+int32_t ViEEncoder::SendData(
+ const uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const webrtc::RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ }
+
+ if (stats_proxy_ != NULL)
+ stats_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
+
+ return send_payload_router_->RoutePayload(
+ encoded_image._frameType, payload_type, encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, encoded_image._buffer,
+ encoded_image._length, &fragmentation_header, rtp_video_hdr)
+ ? 0
+ : -1;
+}
+
+void ViEEncoder::OnEncoderImplementationName(
+ const char* implementation_name) {
+ if (stats_proxy_)
+ stats_proxy_->OnEncoderImplementationName(implementation_name);
+}
+
+int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
+ const uint32_t frame_rate) {
+ if (stats_proxy_)
+ stats_proxy_->OnOutgoingRate(frame_rate, bit_rate);
+ return 0;
+}
+
+void ViEEncoder::OnReceivedSLI(uint32_t /*ssrc*/,
+ uint8_t picture_id) {
+ CriticalSectionScoped cs(data_cs_.get());
+ picture_id_sli_ = picture_id;
+ has_received_sli_ = true;
+}
+
+void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
+ uint64_t picture_id) {
+ CriticalSectionScoped cs(data_cs_.get());
+ picture_id_rpsi_ = picture_id;
+ has_received_rpsi_ = true;
+}
+
+void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ // Key frame request from remote side, signal to VCM.
+ TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
+
+ int idx = 0;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ auto stream_it = ssrc_streams_.find(ssrc);
+ if (stream_it == ssrc_streams_.end()) {
+ LOG_F(LS_WARNING) << "ssrc not found: " << ssrc << ", map size "
+ << ssrc_streams_.size();
+ return;
+ }
+ std::map<unsigned int, int64_t>::iterator time_it =
+ time_last_intra_request_ms_.find(ssrc);
+ if (time_it == time_last_intra_request_ms_.end()) {
+ time_last_intra_request_ms_[ssrc] = 0;
+ }
+
+ int64_t now = TickTime::MillisecondTimestamp();
+ if (time_last_intra_request_ms_[ssrc] + kMinKeyFrameRequestIntervalMs
+ > now) {
+ return;
+ }
+ time_last_intra_request_ms_[ssrc] = now;
+ idx = stream_it->second;
+ }
+ // Release the critsect before triggering key frame.
+ vcm_->IntraFrameRequest(idx);
+}
+
+void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+ CriticalSectionScoped cs(data_cs_.get());
+ std::map<unsigned int, int>::iterator it = ssrc_streams_.find(old_ssrc);
+ if (it == ssrc_streams_.end()) {
+ return;
+ }
+
+ ssrc_streams_[new_ssrc] = it->second;
+ ssrc_streams_.erase(it);
+
+ std::map<unsigned int, int64_t>::iterator time_it =
+ time_last_intra_request_ms_.find(old_ssrc);
+ int64_t last_intra_request_ms = 0;
+ if (time_it != time_last_intra_request_ms_.end()) {
+ last_intra_request_ms = time_it->second;
+ time_last_intra_request_ms_.erase(time_it);
+ }
+ time_last_intra_request_ms_[new_ssrc] = last_intra_request_ms;
+}
+
+void ViEEncoder::SetSsrcs(const std::vector<uint32_t>& ssrcs) {
+ CriticalSectionScoped cs(data_cs_.get());
+ ssrc_streams_.clear();
+ time_last_intra_request_ms_.clear();
+ int idx = 0;
+ for (uint32_t ssrc : ssrcs) {
+ ssrc_streams_[ssrc] = idx++;
+ }
+}
+
+void ViEEncoder::SetMinTransmitBitrate(int min_transmit_bitrate_kbps) {
+ assert(min_transmit_bitrate_kbps >= 0);
+ CriticalSectionScoped crit(data_cs_.get());
+ min_transmit_bitrate_kbps_ = min_transmit_bitrate_kbps;
+}
+
+// Called from ViEBitrateObserver.
+void ViEEncoder::OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms) {
+  LOG(LS_VERBOSE) << "OnNetworkChanged, bitrate " << bitrate_bps
+ << " packet loss " << static_cast<int>(fraction_lost)
+ << " rtt " << round_trip_time_ms;
+ RTC_DCHECK(send_payload_router_ != NULL);
+ vcm_->SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
+ bool video_is_suspended = vcm_->VideoSuspended();
+ bool video_suspension_changed;
+ VideoCodec send_codec;
+ uint32_t first_ssrc;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ last_observed_bitrate_bps_ = bitrate_bps;
+ video_suspension_changed = video_suspended_ != video_is_suspended;
+ video_suspended_ = video_is_suspended;
+ send_codec = encoder_config_;
+ first_ssrc = ssrc_streams_.begin()->first;
+ }
+
+ SimulcastStream* stream_configs = send_codec.simulcastStream;
+ // Allocate the bandwidth between the streams.
+ std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates(
+ bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams);
+ send_payload_router_->SetTargetSendBitrates(stream_bitrates);
+
+ if (!video_suspension_changed)
+ return;
+ // Video suspend-state changed, inform codec observer.
+ LOG(LS_INFO) << "Video suspend state changed " << video_is_suspended
+ << " for ssrc " << first_ssrc;
+ if (stats_proxy_)
+ stats_proxy_->OnSuspendChange(video_is_suspended);
+}
+
+void ViEEncoder::SuspendBelowMinBitrate() {
+ vcm_->SuspendBelowMinBitrate();
+ bitrate_allocator_->EnforceMinBitrate(false);
+}
+
+void ViEEncoder::RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback) {
+ vcm_->RegisterPostEncodeImageCallback(post_encode_callback);
+}
+
+QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessing* vpm)
+ : vp_(vpm) {
+}
+
+QMVideoSettingsCallback::~QMVideoSettingsCallback() {
+}
+
+int32_t QMVideoSettingsCallback::SetVideoQMSettings(
+ const uint32_t frame_rate,
+ const uint32_t width,
+ const uint32_t height) {
+ return vp_->SetTargetResolution(width, height, frame_rate);
+}
+
+void QMVideoSettingsCallback::SetTargetFramerate(int frame_rate) {
+ vp_->SetTargetFramerate(frame_rate);
+}
+
+} // namespace webrtc
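
AllocateStreamBitrates above splits the total bitrate greedily across the simulcast streams: in layer order, each stream takes up to its configured maxBitrate (kbps converted to bps) until nothing remains. A standalone sketch of the same split; the kbps caps in the trailing comment are made-up values:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Greedy split of |total_bps| across streams capped at |max_kbps| each,
    // mirroring AllocateStreamBitrates: lower layers are funded first.
    std::vector<uint32_t> SplitBitrate(uint32_t total_bps,
                                       const std::vector<uint32_t>& max_kbps) {
      std::vector<uint32_t> out(max_kbps.size(), 0);
      uint32_t remainder = total_bps;
      for (size_t i = 0; i < out.size() && remainder > 0; ++i) {
        out[i] = std::min(remainder, max_kbps[i] * 1000u);
        remainder -= out[i];
      }
      return out;
    }

    // SplitBitrate(500000, {100, 300, 1000}) -> {100000, 300000, 100000}:
    // the top layer only gets what remains after the lower layers.
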
diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h
new file mode 100644
index 0000000000..a15fd8920b
--- /dev/null
+++ b/webrtc/video/vie_encoder.h
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_ENCODER_H_
+#define WEBRTC_VIDEO_VIE_ENCODER_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/call/bitrate_allocator.h"
+#include "webrtc/common_types.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video/video_capture_input.h"
+
+namespace webrtc {
+
+class BitrateAllocator;
+class BitrateObserver;
+class Config;
+class CriticalSectionWrapper;
+class EncodedImageCallback;
+class PacedSender;
+class PayloadRouter;
+class ProcessThread;
+class QMVideoSettingsCallback;
+class SendStatisticsProxy;
+class ViEBitrateObserver;
+class ViEEffectFilter;
+class VideoCodingModule;
+
+class ViEEncoder : public RtcpIntraFrameObserver,
+ public VideoEncoderRateObserver,
+ public VCMPacketizationCallback,
+ public VCMSendStatisticsCallback,
+ public VideoCaptureCallback {
+ public:
+ friend class ViEBitrateObserver;
+
+ ViEEncoder(uint32_t number_of_cores,
+ ProcessThread* module_process_thread,
+ SendStatisticsProxy* stats_proxy,
+ I420FrameCallback* pre_encode_callback,
+ PacedSender* pacer,
+ BitrateAllocator* bitrate_allocator);
+ ~ViEEncoder();
+
+ bool Init();
+
+  // This function is assumed to be called before any frames are delivered, and
+  // only once.
+  // Ideally this would be done in Init, but the dependencies between ViEEncoder
+  // and ViEChannel make that hard to do cleanly.
+ void StartThreadsAndSetSharedMembers(
+ rtc::scoped_refptr<PayloadRouter> send_payload_router,
+ VCMProtectionCallback* vcm_protection_callback);
+
+ // This function must be called before the corresponding ViEChannel is
+ // deleted.
+ void StopThreadsAndRemoveSharedMembers();
+
+ void SetNetworkTransmissionState(bool is_transmitting);
+
+ // Returns the id of the owning channel.
+ int Owner() const;
+
+  // Drops incoming frames before they reach the encoder.
+ void Pause();
+ void Restart();
+
+ // Codec settings.
+ int32_t RegisterExternalEncoder(VideoEncoder* encoder,
+ uint8_t pl_type,
+ bool internal_source);
+ int32_t DeRegisterExternalEncoder(uint8_t pl_type);
+ int32_t SetEncoder(const VideoCodec& video_codec);
+
+ // Implementing VideoCaptureCallback.
+ void DeliverFrame(VideoFrame video_frame) override;
+
+  int SendKeyFrame();
+
+ uint32_t LastObservedBitrateBps() const;
+ int CodecTargetBitrate(uint32_t* bitrate) const;
+ // Loss protection. Must be called before SetEncoder() to have max packet size
+ // updated according to protection.
+ // TODO(pbos): Set protection method on construction or extract vcm_ outside
+ // this class and set it on construction there.
+ void SetProtectionMethod(bool nack, bool fec);
+
+ // Buffering mode.
+ void SetSenderBufferingMode(int target_delay_ms);
+
+ // Implements VideoEncoderRateObserver.
+ void OnSetRates(uint32_t bitrate_bps, int framerate) override;
+
+ // Implements VCMPacketizationCallback.
+ int32_t SendData(uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) override;
+ void OnEncoderImplementationName(const char* implementation_name) override;
+
+  // Implements VCMSendStatisticsCallback.
+ int32_t SendStatistics(const uint32_t bit_rate,
+ const uint32_t frame_rate) override;
+
+ // Implements RtcpIntraFrameObserver.
+ void OnReceivedIntraFrameRequest(uint32_t ssrc) override;
+ void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) override;
+ void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) override;
+ void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) override;
+
+ // Sets SSRCs for all streams.
+ void SetSsrcs(const std::vector<uint32_t>& ssrcs);
+
+ void SetMinTransmitBitrate(int min_transmit_bitrate_kbps);
+
+  // Lets the sender suspend video when the allocated rate drops below the
+  // codec's minimum bitrate, and resume once the rate recovers above that
+  // threshold plus a hysteresis window.
+ void SuspendBelowMinBitrate();
+
+ // New-style callbacks, used by VideoSendStream.
+ void RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback);
+
+ int GetPaddingNeededBps() const;
+
+ protected:
+ // Called by BitrateObserver.
+ void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms);
+
+ private:
+ bool EncoderPaused() const EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+ void TraceFrameDropStart() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+ void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+
+ const uint32_t number_of_cores_;
+
+ const rtc::scoped_ptr<VideoProcessing> vp_;
+ const rtc::scoped_ptr<QMVideoSettingsCallback> qm_callback_;
+ const rtc::scoped_ptr<VideoCodingModule> vcm_;
+ rtc::scoped_refptr<PayloadRouter> send_payload_router_;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
+ rtc::scoped_ptr<BitrateObserver> bitrate_observer_;
+
+ SendStatisticsProxy* const stats_proxy_;
+ I420FrameCallback* const pre_encode_callback_;
+ PacedSender* const pacer_;
+ BitrateAllocator* const bitrate_allocator_;
+
+ // The time we last received an input frame or encoded frame. This is used to
+ // track when video is stopped long enough that we also want to stop sending
+ // padding.
+ int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_);
+ VideoCodec encoder_config_ GUARDED_BY(data_cs_);
+ int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
+ uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);
+ int target_delay_ms_ GUARDED_BY(data_cs_);
+ bool network_is_transmitting_ GUARDED_BY(data_cs_);
+ bool encoder_paused_ GUARDED_BY(data_cs_);
+ bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_);
+ std::map<unsigned int, int64_t> time_last_intra_request_ms_
+ GUARDED_BY(data_cs_);
+
+ ProcessThread* module_process_thread_;
+
+ bool has_received_sli_ GUARDED_BY(data_cs_);
+ uint8_t picture_id_sli_ GUARDED_BY(data_cs_);
+ bool has_received_rpsi_ GUARDED_BY(data_cs_);
+ uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_);
+ std::map<uint32_t, int> ssrc_streams_ GUARDED_BY(data_cs_);
+
+ bool video_suspended_ GUARDED_BY(data_cs_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_ENCODER_H_
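
The GUARDED_BY and EXCLUSIVE_LOCKS_REQUIRED macros used in this header expand to Clang thread-safety attributes, so the compiler can check that data_cs_ is held wherever the annotated members are accessed. A minimal, hypothetical sketch of the attribute machinery behind those macros (Mutex and Counter are not WebRTC classes):

    // Compile with clang -Wthread-safety for the analysis to run.
    class __attribute__((capability("mutex"))) Mutex {
     public:
      void Lock() __attribute__((acquire_capability())) {}
      void Unlock() __attribute__((release_capability())) {}
    };

    class Counter {
     public:
      void Increment() {
        mu_.Lock();
        ++value_;  // OK: |mu_| is held, satisfying the guarded_by contract.
        mu_.Unlock();
      }
      // Incrementing |value_| without holding |mu_| would be flagged at
      // compile time, just as touching encoder_config_ without data_cs_.

     private:
      Mutex mu_;
      int value_ __attribute__((guarded_by(mu_))) = 0;
    };
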
diff --git a/webrtc/video/vie_receiver.cc b/webrtc/video/vie_receiver.cc
new file mode 100644
index 0000000000..4fb706c764
--- /dev/null
+++ b/webrtc/video/vie_receiver.cc
@@ -0,0 +1,483 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_receiver.h"
+
+#include <sstream>
+#include <vector>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+static const int kPacketLogIntervalMs = 10000;
+
+ViEReceiver::ViEReceiver(VideoCodingModule* module_vcm,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpFeedback* rtp_feedback)
+ : receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ clock_(Clock::GetRealTimeClock()),
+ rtp_header_parser_(RtpHeaderParser::Create()),
+ rtp_payload_registry_(
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
+ rtp_receiver_(
+ RtpReceiver::CreateVideoReceiver(clock_,
+ this,
+ rtp_feedback,
+ rtp_payload_registry_.get())),
+ rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
+ fec_receiver_(FecReceiver::Create(this)),
+ rtp_rtcp_(NULL),
+ vcm_(module_vcm),
+ remote_bitrate_estimator_(remote_bitrate_estimator),
+ ntp_estimator_(new RemoteNtpTimeEstimator(clock_)),
+ receiving_(false),
+ restored_packet_in_use_(false),
+ receiving_ast_enabled_(false),
+ receiving_cvo_enabled_(false),
+ receiving_tsn_enabled_(false),
+ last_packet_log_ms_(-1) {
+ assert(remote_bitrate_estimator);
+}
+
+ViEReceiver::~ViEReceiver() {
+ UpdateHistograms();
+}
+
+void ViEReceiver::UpdateHistograms() {
+ FecPacketCounter counter = fec_receiver_->GetPacketCounter();
+ if (counter.num_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.ReceivedFecPacketsInPercent",
+ static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
+ }
+ if (counter.num_fec_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
+ static_cast<int>(counter.num_recovered_packets * 100 /
+ counter.num_fec_packets));
+ }
+}
+
+bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
+ int8_t old_pltype = -1;
+ if (rtp_payload_registry_->ReceivePayloadType(video_codec.plName,
+ kVideoPayloadTypeFrequency,
+ 0,
+ video_codec.maxBitrate,
+ &old_pltype) != -1) {
+ rtp_payload_registry_->DeRegisterReceivePayload(old_pltype);
+ }
+
+ return RegisterPayload(video_codec);
+}
+
+bool ViEReceiver::RegisterPayload(const VideoCodec& video_codec) {
+ return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
+ video_codec.plType,
+ kVideoPayloadTypeFrequency,
+ 0,
+ video_codec.maxBitrate) == 0;
+}
+
+void ViEReceiver::SetNackStatus(bool enable,
+ int max_nack_reordering_threshold) {
+ if (!enable) {
+ // Reset the threshold back to the lower default threshold when NACK is
+ // disabled since we no longer will be receiving retransmissions.
+ max_nack_reordering_threshold = kDefaultMaxReorderingThreshold;
+ }
+ rtp_receive_statistics_->SetMaxReorderingThreshold(
+ max_nack_reordering_threshold);
+ rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
+}
+
+void ViEReceiver::SetRtxPayloadType(int payload_type,
+ int associated_payload_type) {
+ rtp_payload_registry_->SetRtxPayloadType(payload_type,
+ associated_payload_type);
+}
+
+void ViEReceiver::SetUseRtxPayloadMappingOnRestore(bool val) {
+ rtp_payload_registry_->set_use_rtx_payload_mapping_on_restore(val);
+}
+
+void ViEReceiver::SetRtxSsrc(uint32_t ssrc) {
+ rtp_payload_registry_->SetRtxSsrc(ssrc);
+}
+
+bool ViEReceiver::GetRtxSsrc(uint32_t* ssrc) const {
+ return rtp_payload_registry_->GetRtxSsrc(ssrc);
+}
+
+bool ViEReceiver::IsFecEnabled() const {
+ return rtp_payload_registry_->ulpfec_payload_type() > -1;
+}
+
+uint32_t ViEReceiver::GetRemoteSsrc() const {
+ return rtp_receiver_->SSRC();
+}
+
+int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
+ return rtp_receiver_->CSRCs(csrcs);
+}
+
+void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
+ rtp_rtcp_ = module;
+}
+
+RtpReceiver* ViEReceiver::GetRtpReceiver() const {
+ return rtp_receiver_.get();
+}
+
+void ViEReceiver::RegisterRtpRtcpModules(
+ const std::vector<RtpRtcp*>& rtp_modules) {
+ CriticalSectionScoped cs(receive_cs_.get());
+  // Only replace the "simulcast" modules. The base module can be accessed
+  // without a lock, whereas the simulcast modules require locking since they
+  // can be changed at runtime.
+ rtp_rtcp_simulcast_ =
+ std::vector<RtpRtcp*>(rtp_modules.begin() + 1, rtp_modules.end());
+}
+
+bool ViEReceiver::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+ if (enable) {
+ return rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset, id);
+ } else {
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset);
+ }
+}
+
+bool ViEReceiver::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
+ if (enable) {
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id)) {
+ receiving_ast_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
+ } else {
+ receiving_ast_enabled_ = false;
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime);
+ }
+}
+
+bool ViEReceiver::SetReceiveVideoRotationStatus(bool enable, int id) {
+ if (enable) {
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionVideoRotation, id)) {
+ receiving_cvo_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
+ } else {
+ receiving_cvo_enabled_ = false;
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionVideoRotation);
+ }
+}
+
+bool ViEReceiver::SetReceiveTransportSequenceNumber(bool enable, int id) {
+ if (enable) {
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber, id)) {
+ receiving_tsn_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
+ } else {
+ receiving_tsn_enabled_ = false;
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber);
+ }
+}
+
+int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ return InsertRTPPacket(static_cast<const uint8_t*>(rtp_packet),
+ rtp_packet_length, packet_time);
+}
+
+int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
+ size_t rtcp_packet_length) {
+ return InsertRTCPPacket(static_cast<const uint8_t*>(rtcp_packet),
+ rtcp_packet_length);
+}
+
+int32_t ViEReceiver::OnReceivedPayloadData(const uint8_t* payload_data,
+ const size_t payload_size,
+ const WebRtcRTPHeader* rtp_header) {
+ WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
+ rtp_header_with_ntp.ntp_time_ms =
+ ntp_estimator_->Estimate(rtp_header->header.timestamp);
+ if (vcm_->IncomingPacket(payload_data,
+ payload_size,
+ rtp_header_with_ntp) != 0) {
+    // The jitter buffer rejected the packet; propagate the error.
+ return -1;
+ }
+ return 0;
+}
+
+bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length) {
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
+ return false;
+ }
+ header.payload_type_frequency = kVideoPayloadTypeFrequency;
+ bool in_order = IsPacketInOrder(header);
+ return ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
+}
+
+int ViEReceiver::InsertRTPPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ {
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (!receiving_) {
+ return -1;
+ }
+ }
+
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
+ &header)) {
+ return -1;
+ }
+ size_t payload_length = rtp_packet_length - header.headerLength;
+ int64_t arrival_time_ms;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (packet_time.timestamp != -1)
+ arrival_time_ms = (packet_time.timestamp + 500) / 1000;
+ else
+ arrival_time_ms = now_ms;
+
+ {
+ // Periodically log the RTP header of incoming packets.
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
+ std::stringstream ss;
+ ss << "Packet received on SSRC: " << header.ssrc << " with payload type: "
+ << static_cast<int>(header.payloadType) << ", timestamp: "
+ << header.timestamp << ", sequence number: " << header.sequenceNumber
+ << ", arrival time: " << arrival_time_ms;
+ if (header.extension.hasTransmissionTimeOffset)
+ ss << ", toffset: " << header.extension.transmissionTimeOffset;
+ if (header.extension.hasAbsoluteSendTime)
+ ss << ", abs send time: " << header.extension.absoluteSendTime;
+ LOG(LS_INFO) << ss.str();
+ last_packet_log_ms_ = now_ms;
+ }
+ }
+
+ remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_length,
+ header, true);
+ header.payload_type_frequency = kVideoPayloadTypeFrequency;
+
+ bool in_order = IsPacketInOrder(header);
+ rtp_payload_registry_->SetIncomingPayloadType(header);
+ int ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order)
+ ? 0
+ : -1;
+ // Update receive statistics after ReceivePacket.
+ // Receive statistics will be reset if the payload type changes (make sure
+ // that the first packet is included in the stats).
+ rtp_receive_statistics_->IncomingPacket(
+ header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
+ return ret;
+}
+
+bool ViEReceiver::ReceivePacket(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header,
+ bool in_order) {
+ if (rtp_payload_registry_->IsEncapsulated(header)) {
+ return ParseAndHandleEncapsulatingHeader(packet, packet_length, header);
+ }
+ const uint8_t* payload = packet + header.headerLength;
+ assert(packet_length >= header.headerLength);
+ size_t payload_length = packet_length - header.headerLength;
+ PayloadUnion payload_specific;
+ if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
+ &payload_specific)) {
+ return false;
+ }
+ return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
+ payload_specific, in_order);
+}
+
+bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header) {
+ if (rtp_payload_registry_->IsRed(header)) {
+ int8_t ulpfec_pt = rtp_payload_registry_->ulpfec_payload_type();
+ if (packet[header.headerLength] == ulpfec_pt) {
+ rtp_receive_statistics_->FecPacketReceived(header, packet_length);
+ // Notify vcm about received FEC packets to avoid NACKing these packets.
+ NotifyReceiverOfFecPacket(header);
+ }
+ if (fec_receiver_->AddReceivedRedPacket(
+ header, packet, packet_length, ulpfec_pt) != 0) {
+ return false;
+ }
+ return fec_receiver_->ProcessReceivedFec() == 0;
+ } else if (rtp_payload_registry_->IsRtx(header)) {
+ if (header.headerLength + header.paddingLength == packet_length) {
+ // This is an empty packet and should be silently dropped before trying to
+ // parse the RTX header.
+ return true;
+ }
+ // Remove the RTX header and parse the original RTP header.
+ if (packet_length < header.headerLength)
+ return false;
+ if (packet_length > sizeof(restored_packet_))
+ return false;
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (restored_packet_in_use_) {
+ LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
+ return false;
+ }
+ if (!rtp_payload_registry_->RestoreOriginalPacket(
+ restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
+ header)) {
+ LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header";
+ return false;
+ }
+ restored_packet_in_use_ = true;
+ bool ret = OnRecoveredPacket(restored_packet_, packet_length);
+ restored_packet_in_use_ = false;
+ return ret;
+ }
+ return false;
+}
+
+void ViEReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
+ int8_t last_media_payload_type =
+ rtp_payload_registry_->last_received_media_payload_type();
+ if (last_media_payload_type < 0) {
+ LOG(LS_WARNING) << "Failed to get last media payload type.";
+ return;
+ }
+ // Fake an empty media packet.
+ WebRtcRTPHeader rtp_header = {};
+ rtp_header.header = header;
+ rtp_header.header.payloadType = last_media_payload_type;
+ rtp_header.header.paddingLength = 0;
+ PayloadUnion payload_specific;
+ if (!rtp_payload_registry_->GetPayloadSpecifics(last_media_payload_type,
+ &payload_specific)) {
+ LOG(LS_WARNING) << "Failed to get payload specifics.";
+ return;
+ }
+ rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
+ rtp_header.type.Video.rotation = kVideoRotation_0;
+ if (header.extension.hasVideoRotation) {
+ rtp_header.type.Video.rotation =
+ ConvertCVOByteToVideoRotation(header.extension.videoRotation);
+ }
+ OnReceivedPayloadData(NULL, 0, &rtp_header);
+}
+
+int ViEReceiver::InsertRTCPPacket(const uint8_t* rtcp_packet,
+ size_t rtcp_packet_length) {
+ {
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (!receiving_) {
+ return -1;
+ }
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_simulcast_)
+ rtp_rtcp->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+ }
+ assert(rtp_rtcp_); // Should be set by owner at construction time.
+ int ret = rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+ if (ret != 0) {
+ return ret;
+ }
+
+ int64_t rtt = 0;
+ rtp_rtcp_->RTT(rtp_receiver_->SSRC(), &rtt, NULL, NULL, NULL);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return 0;
+ }
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
+ &rtp_timestamp)) {
+ // Waiting for RTCP.
+ return 0;
+ }
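+  // Feed the latest RTCP SR (NTP/RTP timestamp pair) and the RTT into the
+  // estimator that maps RTP timestamps to remote NTP time; the result is
+  // attached to incoming packets in OnReceivedPayloadData.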
+ ntp_estimator_->UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
+
+ return 0;
+}
+
+void ViEReceiver::StartReceive() {
+ CriticalSectionScoped cs(receive_cs_.get());
+ receiving_ = true;
+}
+
+void ViEReceiver::StopReceive() {
+ CriticalSectionScoped cs(receive_cs_.get());
+ receiving_ = false;
+}
+
+ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
+ return rtp_receive_statistics_.get();
+}
+
+bool ViEReceiver::IsPacketInOrder(const RTPHeader& header) const {
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(header.ssrc);
+ if (!statistician)
+ return false;
+ return statistician->IsPacketInOrder(header.sequenceNumber);
+}
+
+bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header,
+ bool in_order) const {
+ // Retransmissions are handled separately if RTX is enabled.
+ if (rtp_payload_registry_->RtxEnabled())
+ return false;
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(header.ssrc);
+ if (!statistician)
+ return false;
+ // Check if this is a retransmission.
+ int64_t min_rtt = 0;
+ rtp_rtcp_->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
+ return !in_order &&
+ statistician->IsRetransmitOfOldPacket(header, min_rtt);
+}
+} // namespace webrtc
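
ParseAndHandleEncapsulatingHeader above restores RTX retransmissions in the spirit of RFC 4588: the first two bytes of the RTX payload carry the original sequence number, and the SSRC and payload type are mapped back to the media stream's values via the payload registry. A schematic of that unwrapping under the simplifying assumption of a fixed 12-byte RTP header (the real RestoreOriginalPacket handles the general case):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Schematic RTX unwrap: assumes no CSRCs or header extensions.
    bool UnwrapRtx(const uint8_t* rtx, size_t length, uint32_t media_ssrc,
                   uint8_t media_pt, uint8_t* restored, size_t* restored_len) {
      const size_t kHeader = 12;
      if (length < kHeader + 2)
        return false;  // No room for the original sequence number (OSN).
      std::memcpy(restored, rtx, kHeader);  // Copy the RTP header.
      // Copy the payload, skipping the two-byte OSN field.
      std::memcpy(restored + kHeader, rtx + kHeader + 2,
                  length - kHeader - 2);
      // The OSN goes back into header bytes 2-3.
      restored[2] = rtx[kHeader];
      restored[3] = rtx[kHeader + 1];
      // Payload type lives in the low 7 bits of byte 1; SSRC in bytes 8-11.
      restored[1] = (restored[1] & 0x80) | media_pt;
      restored[8] = media_ssrc >> 24;
      restored[9] = (media_ssrc >> 16) & 0xff;
      restored[10] = (media_ssrc >> 8) & 0xff;
      restored[11] = media_ssrc & 0xff;
      *restored_len = length - 2;
      return true;
    }
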
diff --git a/webrtc/video/vie_receiver.h b/webrtc/video/vie_receiver.h
new file mode 100644
index 0000000000..8204888bbb
--- /dev/null
+++ b/webrtc/video/vie_receiver.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_RECEIVER_H_
+#define WEBRTC_VIDEO_VIE_RECEIVER_H_
+
+#include <list>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class FecReceiver;
+class RemoteNtpTimeEstimator;
+class ReceiveStatistics;
+class RemoteBitrateEstimator;
+class RtpHeaderParser;
+class RTPPayloadRegistry;
+class RtpReceiver;
+class RtpRtcp;
+class VideoCodingModule;
+struct ReceiveBandwidthEstimatorStats;
+
+class ViEReceiver : public RtpData {
+ public:
+ ViEReceiver(VideoCodingModule* module_vcm,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpFeedback* rtp_feedback);
+ ~ViEReceiver();
+
+ bool SetReceiveCodec(const VideoCodec& video_codec);
+ bool RegisterPayload(const VideoCodec& video_codec);
+
+ void SetNackStatus(bool enable, int max_nack_reordering_threshold);
+ void SetRtxPayloadType(int payload_type, int associated_payload_type);
+ // If set to true, the RTX payload type mapping supplied in
+ // |SetRtxPayloadType| will be used when restoring RTX packets. Without it,
+ // RTX packets will always be restored to the last non-RTX packet payload type
+ // received.
+ void SetUseRtxPayloadMappingOnRestore(bool val);
+ void SetRtxSsrc(uint32_t ssrc);
+ bool GetRtxSsrc(uint32_t* ssrc) const;
+
+ bool IsFecEnabled() const;
+
+ uint32_t GetRemoteSsrc() const;
+ int GetCsrcs(uint32_t* csrcs) const;
+
+ void SetRtpRtcpModule(RtpRtcp* module);
+
+ RtpReceiver* GetRtpReceiver() const;
+
+ void RegisterRtpRtcpModules(const std::vector<RtpRtcp*>& rtp_modules);
+
+ bool SetReceiveTimestampOffsetStatus(bool enable, int id);
+ bool SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
+ bool SetReceiveVideoRotationStatus(bool enable, int id);
+ bool SetReceiveTransportSequenceNumber(bool enable, int id);
+
+ void StartReceive();
+ void StopReceive();
+
+ // Receives packets from external transport.
+ int ReceivedRTPPacket(const void* rtp_packet, size_t rtp_packet_length,
+ const PacketTime& packet_time);
+ int ReceivedRTCPPacket(const void* rtcp_packet, size_t rtcp_packet_length);
+
+ // Implements RtpData.
+ int32_t OnReceivedPayloadData(const uint8_t* payload_data,
+ const size_t payload_size,
+ const WebRtcRTPHeader* rtp_header) override;
+ bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
+
+ ReceiveStatistics* GetReceiveStatistics() const;
+
+ private:
+ int InsertRTPPacket(const uint8_t* rtp_packet, size_t rtp_packet_length,
+ const PacketTime& packet_time);
+ bool ReceivePacket(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header,
+ bool in_order);
+  // Parses and handles encapsulating headers such as RTX and RED.
+ // This function assumes that it's being called from only one thread.
+ bool ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header);
+ void NotifyReceiverOfFecPacket(const RTPHeader& header);
+ int InsertRTCPPacket(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
+ bool IsPacketInOrder(const RTPHeader& header) const;
+ bool IsPacketRetransmitted(const RTPHeader& header, bool in_order) const;
+ void UpdateHistograms();
+
+ rtc::scoped_ptr<CriticalSectionWrapper> receive_cs_;
+ Clock* clock_;
+ rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+ rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+ const rtc::scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
+ rtc::scoped_ptr<FecReceiver> fec_receiver_;
+ RtpRtcp* rtp_rtcp_;
+ std::vector<RtpRtcp*> rtp_rtcp_simulcast_;
+ VideoCodingModule* vcm_;
+ RemoteBitrateEstimator* remote_bitrate_estimator_;
+
+ rtc::scoped_ptr<RemoteNtpTimeEstimator> ntp_estimator_;
+
+ bool receiving_;
+ uint8_t restored_packet_[IP_PACKET_SIZE];
+ bool restored_packet_in_use_;
+ bool receiving_ast_enabled_;
+ bool receiving_cvo_enabled_;
+ bool receiving_tsn_enabled_;
+ int64_t last_packet_log_ms_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_RECEIVER_H_
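
Taken together, the RTX setters above configure how retransmissions are recognized and restored. A sketch of typical wiring; the payload types and SSRC are assumed values for illustration:

    #include "webrtc/video/vie_receiver.h"

    void ConfigureRtxReceive(webrtc::ViEReceiver* receiver) {
      receiver->SetRtxSsrc(0x22334455);     // SSRC of the RTX stream.
      receiver->SetRtxPayloadType(97, 96);  // RTX pt 97 wraps media pt 96.
      // Restore using the mapping above rather than the last received
      // non-RTX payload type.
      receiver->SetUseRtxPayloadMappingOnRestore(true);
    }
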
diff --git a/webrtc/video/vie_remb.cc b/webrtc/video/vie_remb.cc
new file mode 100644
index 0000000000..95c2f1e130
--- /dev/null
+++ b/webrtc/video/vie_remb.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_remb.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+const int kRembSendIntervalMs = 200;
+
+// Percentage threshold: if the estimate drops below this fraction of the last
+// sent value, a new REMB is sent immediately.
+const unsigned int kSendThresholdPercent = 97;
+
+VieRemb::VieRemb(Clock* clock)
+ : clock_(clock),
+ list_crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ last_remb_time_(clock_->TimeInMilliseconds()),
+ last_send_bitrate_(0),
+ bitrate_(0) {}
+
+VieRemb::~VieRemb() {}
+
+void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+ if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
+ receive_modules_.end())
+ return;
+
+  // The module probably doesn't have a remote SSRC yet; just track it in the
+  // list of receive modules.
+ receive_modules_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+ for (RtpModules::iterator it = receive_modules_.begin();
+ it != receive_modules_.end(); ++it) {
+ if ((*it) == rtp_rtcp) {
+ receive_modules_.erase(it);
+ break;
+ }
+ }
+}
+
+void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+
+ // Verify this module hasn't been added earlier.
+ if (std::find(rtcp_sender_.begin(), rtcp_sender_.end(), rtp_rtcp) !=
+ rtcp_sender_.end())
+ return;
+ rtcp_sender_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+ for (RtpModules::iterator it = rtcp_sender_.begin();
+ it != rtcp_sender_.end(); ++it) {
+ if ((*it) == rtp_rtcp) {
+ rtcp_sender_.erase(it);
+ return;
+ }
+ }
+}
+
+bool VieRemb::InUse() const {
+ CriticalSectionScoped cs(list_crit_.get());
+  return !receive_modules_.empty() || !rtcp_sender_.empty();
+}
+
+void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ list_crit_->Enter();
+ // If we already have an estimate, check if the new total estimate is below
+ // kSendThresholdPercent of the previous estimate.
+ if (last_send_bitrate_ > 0) {
+ unsigned int new_remb_bitrate = last_send_bitrate_ - bitrate_ + bitrate;
+
+ if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
+ // The new bitrate estimate is less than kSendThresholdPercent % of the
+ // last report. Send a REMB asap.
+ last_remb_time_ = clock_->TimeInMilliseconds() - kRembSendIntervalMs;
+ }
+ }
+ bitrate_ = bitrate;
+
+  // Rate-limit REMB to at most one packet per kRembSendIntervalMs.
+ int64_t now = clock_->TimeInMilliseconds();
+
+ if (now - last_remb_time_ < kRembSendIntervalMs) {
+ list_crit_->Leave();
+ return;
+ }
+ last_remb_time_ = now;
+
+ if (ssrcs.empty() || receive_modules_.empty()) {
+ list_crit_->Leave();
+ return;
+ }
+
+ // Send a REMB packet.
+ RtpRtcp* sender = NULL;
+ if (!rtcp_sender_.empty()) {
+ sender = rtcp_sender_.front();
+ } else {
+ sender = receive_modules_.front();
+ }
+ last_send_bitrate_ = bitrate_;
+
+ list_crit_->Leave();
+
+ if (sender) {
+ sender->SetREMBData(bitrate_, ssrcs);
+ }
+}
+
+} // namespace webrtc
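
Stripped of bookkeeping, the send decision in OnReceiveBitrateChanged above is: send immediately when the combined estimate falls below kSendThresholdPercent of the last sent value, otherwise at most once per kRembSendIntervalMs. A simplified predicate capturing that rule (the real code also folds the per-call delta into the previous combined estimate):

    #include <cstdint>

    bool ShouldSendRemb(uint32_t last_sent_bps, uint32_t estimate_bps,
                        int64_t now_ms, int64_t last_remb_time_ms) {
      const int64_t kRembSendIntervalMs = 200;
      const uint64_t kSendThresholdPercent = 97;
      // A drop of more than 3% below the last sent value forces a send.
      if (last_sent_bps > 0 &&
          estimate_bps < kSendThresholdPercent * last_sent_bps / 100)
        return true;
      // Otherwise, send at most once per interval.
      return now_ms - last_remb_time_ms >= kRembSendIntervalMs;
    }
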
diff --git a/webrtc/video/vie_remb.h b/webrtc/video/vie_remb.h
new file mode 100644
index 0000000000..2a3d916d6c
--- /dev/null
+++ b/webrtc/video/vie_remb.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_REMB_H_
+#define WEBRTC_VIDEO_VIE_REMB_H_
+
+#include <list>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class RtpRtcp;
+
+class VieRemb : public RemoteBitrateObserver {
+ public:
+ explicit VieRemb(Clock* clock);
+ ~VieRemb();
+
+ // Called to add a receive channel to include in the REMB packet.
+ void AddReceiveChannel(RtpRtcp* rtp_rtcp);
+
+ // Removes the specified channel from REMB estimate.
+ void RemoveReceiveChannel(RtpRtcp* rtp_rtcp);
+
+ // Called to add a module that can generate and send REMB RTCP.
+ void AddRembSender(RtpRtcp* rtp_rtcp);
+
+ // Removes a REMB RTCP sender.
+ void RemoveRembSender(RtpRtcp* rtp_rtcp);
+
+ // Returns true if the instance is in use, false otherwise.
+ bool InUse() const;
+
+ // Called every time there is a new bitrate estimate for a receive channel
+ // group. This call will trigger a new RTCP REMB packet if the bitrate
+ // estimate has decreased or if no RTCP REMB packet has been sent for
+ // a certain time interval.
+  // Implements RemoteBitrateObserver.
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate);
+
+ private:
+ typedef std::list<RtpRtcp*> RtpModules;
+
+ Clock* const clock_;
+ rtc::scoped_ptr<CriticalSectionWrapper> list_crit_;
+
+ // The last time a REMB was sent.
+ int64_t last_remb_time_;
+ unsigned int last_send_bitrate_;
+
+ // All RtpRtcp modules to include in the REMB packet.
+ RtpModules receive_modules_;
+
+ // All modules that can send REMB RTCP.
+ RtpModules rtcp_sender_;
+
+  // The bitrate from the most recent receive estimate.
+ unsigned int bitrate_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_REMB_H_
diff --git a/webrtc/video/vie_remb_unittest.cc b/webrtc/video/vie_remb_unittest.cc
new file mode 100644
index 0000000000..a44d593b22
--- /dev/null
+++ b/webrtc/video/vie_remb_unittest.cc
@@ -0,0 +1,253 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for ViERemb.
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/vie_remb.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+namespace webrtc {
+
+class ViERembTest : public ::testing::Test {
+ public:
+ ViERembTest() : fake_clock_(12345) {}
+
+ protected:
+ virtual void SetUp() {
+ process_thread_.reset(new NiceMock<MockProcessThread>);
+ vie_remb_.reset(new VieRemb(&fake_clock_));
+ }
+ SimulatedClock fake_clock_;
+ rtc::scoped_ptr<MockProcessThread> process_thread_;
+ rtc::scoped_ptr<VieRemb> vie_remb_;
+};
+
+TEST_F(ViERembTest, OneModuleTestForSendingRemb) {
+ MockRtpRtcp rtp;
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+
+ const unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Lower bitrate to send another REMB packet.
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate - 100, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate - 100);
+
+ vie_remb_->RemoveReceiveChannel(&rtp);
+ vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, LowerEstimateToSendRemb) {
+ MockRtpRtcp rtp;
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower the estimate by more than 3% to trigger a call to SetREMBData right
+  // away.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+}
+
+TEST_F(ViERembTest, VerifyIncreasingAndDecreasing) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate[] = { 456, 789 };
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
+
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0], ssrcs))
+ .Times(1);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1] + 100);
+
+ // Lower the estimate to trigger a callback.
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[1], ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1]);
+
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveRembSender(&rtp_0);
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, NoRembForIncreasedBitrate) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Increased estimate shouldn't trigger a callback right away.
+ EXPECT_CALL(rtp_0, SetREMBData(_, _))
+ .Times(0);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate + 1);
+
+  // Decreasing the estimate by less than 3% shouldn't trigger a new callback.
+ EXPECT_CALL(rtp_0, SetREMBData(_, _))
+ .Times(0);
+ int lower_estimate = bitrate_estimate * 98 / 100;
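+  // 98/100 is only a 2% decrease, inside the 3% hysteresis window.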
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, lower_estimate);
+
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveRembSender(&rtp_0);
+}
+
+TEST_F(ViERembTest, ChangeSendRtpModule) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Decrease estimate to trigger a REMB.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Switch the REMB sender from the first module to the second -> the next
+  // REMB should be sent on the second module.
+ vie_remb_->RemoveRembSender(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_1);
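+  // The estimate is unchanged here, so no immediate REMB is expected from
+  // this call.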
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp_1, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, OnlyOneRembForDoubleProcess) {
+ MockRtpRtcp rtp;
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower the estimate; this should trigger a call to SetREMBData right away.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Call OnReceiveBitrateChanged again; this should not trigger a new callback.
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(0);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ vie_remb_->RemoveReceiveChannel(&rtp);
+ vie_remb_->RemoveRembSender(&rtp);
+}
+
+// Only register a receiving module and make sure we fall back to triggering
+// the REMB packet on it.
+TEST_F(ViERembTest, NoSendingRtpModule) {
+ MockRtpRtcp rtp;
+ vie_remb_->AddReceiveChannel(&rtp);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower the estimate to trigger a new REMB packet.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/vie_sync_module.cc b/webrtc/video/vie_sync_module.cc
new file mode 100644
index 0000000000..9ca9a9480e
--- /dev/null
+++ b/webrtc/video/vie_sync_module.cc
@@ -0,0 +1,174 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_sync_module.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/video/stream_synchronization.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+int UpdateMeasurements(StreamSynchronization::Measurements* stream,
+ const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
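+  // Latest RTP timestamp and local receive time of the last packet; bail out
+  // if nothing has been received yet.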
+ if (!receiver.Timestamp(&stream->latest_timestamp))
+ return -1;
+ if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms))
+ return -1;
+
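+  // Fetch the NTP/RTP timestamp pair from the most recent RTCP sender report;
+  // without it the stream cannot be mapped to a common clock.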
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp.RemoteNTP(&ntp_secs,
+ &ntp_frac,
+ NULL,
+ NULL,
+ &rtp_timestamp)) {
+ return -1;
+ }
+
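+  // Record the sender report in the stream's RTCP list, which the
+  // synchronization code uses to convert RTP timestamps to NTP time.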
+ bool new_rtcp_sr = false;
+ if (!UpdateRtcpList(
+ ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp, &new_rtcp_sr)) {
+ return -1;
+ }
+
+ return 0;
+}
+
+ViESyncModule::ViESyncModule(VideoCodingModule* vcm)
+ : data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ vcm_(vcm),
+ video_receiver_(NULL),
+ video_rtp_rtcp_(NULL),
+ voe_channel_id_(-1),
+ voe_sync_interface_(NULL),
+ last_sync_time_(TickTime::Now()),
+ sync_() {
+}
+
+ViESyncModule::~ViESyncModule() {
+}
+
+int ViESyncModule::ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface,
+ RtpRtcp* video_rtcp_module,
+ RtpReceiver* video_receiver) {
+ CriticalSectionScoped cs(data_cs_.get());
+ // Prevent expensive no-ops.
+ if (voe_channel_id_ == voe_channel_id &&
+ voe_sync_interface_ == voe_sync_interface &&
+ video_receiver_ == video_receiver &&
+ video_rtp_rtcp_ == video_rtcp_module) {
+ return 0;
+ }
+ voe_channel_id_ = voe_channel_id;
+ voe_sync_interface_ = voe_sync_interface;
+ video_receiver_ = video_receiver;
+ video_rtp_rtcp_ = video_rtcp_module;
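+  // Recreate the synchronization state, keyed on the video SSRC and the
+  // voice channel id.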
+ sync_.reset(
+ new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id));
+
+ if (!voe_sync_interface) {
+ voe_channel_id_ = -1;
+ if (voe_channel_id >= 0) {
+      // Trying to set a voice channel but no interface exists.
+ return -1;
+ }
+ return 0;
+ }
+ return 0;
+}
+
+int ViESyncModule::VoiceChannel() {
+ return voe_channel_id_;
+}
+
+int64_t ViESyncModule::TimeUntilNextProcess() {
+ const int64_t kSyncIntervalMs = 1000;
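+  // The module process thread uses this to schedule Process() roughly once
+  // per second.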
+ return kSyncIntervalMs - (TickTime::Now() - last_sync_time_).Milliseconds();
+}
+
+int32_t ViESyncModule::Process() {
+ CriticalSectionScoped cs(data_cs_.get());
+ last_sync_time_ = TickTime::Now();
+
+ const int current_video_delay_ms = vcm_->Delay();
+
+ if (voe_channel_id_ == -1) {
+ return 0;
+ }
+ assert(video_rtp_rtcp_ && voe_sync_interface_);
+ assert(sync_.get());
+
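+  // Fetch the current audio delay (jitter buffer + playout buffer) from
+  // VoiceEngine; if no estimate is available yet, skip this round.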
+ int audio_jitter_buffer_delay_ms = 0;
+ int playout_buffer_delay_ms = 0;
+ if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
+ &audio_jitter_buffer_delay_ms,
+ &playout_buffer_delay_ms) != 0) {
+ return 0;
+ }
+ const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
+ playout_buffer_delay_ms;
+
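+  // Get the voice channel's RTP/RTCP modules so its RTCP measurements can be
+  // updated the same way as the video stream's below.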
+ RtpRtcp* voice_rtp_rtcp = NULL;
+ RtpReceiver* voice_receiver = NULL;
+ if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
+ &voice_receiver)) {
+ return 0;
+ }
+ assert(voice_rtp_rtcp);
+ assert(voice_receiver);
+
+ if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
+ *video_receiver_) != 0) {
+ return 0;
+ }
+
+ if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
+ *voice_receiver) != 0) {
+ return 0;
+ }
+
+ int relative_delay_ms;
+ // Calculate how much later or earlier the audio stream is compared to video.
+ if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_,
+ &relative_delay_ms)) {
+ return 0;
+ }
+
+ TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", current_video_delay_ms);
+ TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms);
+ TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
+ int target_audio_delay_ms = 0;
+ int target_video_delay_ms = current_video_delay_ms;
+ // Calculate the necessary extra audio delay and desired total video
+ // delay to get the streams in sync.
+ if (!sync_->ComputeDelays(relative_delay_ms,
+ current_audio_delay_ms,
+ &target_audio_delay_ms,
+ &target_video_delay_ms)) {
+ return 0;
+ }
+
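+  // Apply the computed targets: audio via VoE's minimum playout delay, video
+  // via the video coding module.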
+ if (voe_sync_interface_->SetMinimumPlayoutDelay(
+ voe_channel_id_, target_audio_delay_ms) == -1) {
+ LOG(LS_ERROR) << "Error setting voice delay.";
+ }
+ vcm_->SetMinimumPlayoutDelay(target_video_delay_ms);
+ return 0;
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/vie_sync_module.h b/webrtc/video/vie_sync_module.h
new file mode 100644
index 0000000000..a9ad20a103
--- /dev/null
+++ b/webrtc/video/vie_sync_module.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESyncModule is responsible for synchronizing audio and video for a given
+// VoE and ViE channel pair.
+
+#ifndef WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
+#define WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/stream_synchronization.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpRtcp;
+class VideoCodingModule;
+class ViEChannel;
+class VoEVideoSync;
+
+class ViESyncModule : public Module {
+ public:
+ explicit ViESyncModule(VideoCodingModule* vcm);
+ ~ViESyncModule();
+
+ int ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface,
+ RtpRtcp* video_rtcp_module,
+ RtpReceiver* video_receiver);
+
+ int VoiceChannel();
+
+ // Implements Module.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ private:
+ rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
+ VideoCodingModule* const vcm_;
+ RtpReceiver* video_receiver_;
+ RtpRtcp* video_rtp_rtcp_;
+ int voe_channel_id_;
+ VoEVideoSync* voe_sync_interface_;
+ TickTime last_sync_time_;
+ rtc::scoped_ptr<StreamSynchronization> sync_;
+ StreamSynchronization::Measurements audio_measurement_;
+ StreamSynchronization::Measurements video_measurement_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index f9dbbce5aa..db8d5c7e89 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -24,12 +24,24 @@
'<(webrtc_root)/webrtc.gyp:rtc_event_log',
],
'webrtc_video_sources': [
+ 'video/call_stats.cc',
+ 'video/call_stats.h',
'video/encoded_frame_callback_adapter.cc',
'video/encoded_frame_callback_adapter.h',
+ 'video/encoder_state_feedback.cc',
+ 'video/encoder_state_feedback.h',
+ 'video/overuse_frame_detector.cc',
+ 'video/overuse_frame_detector.h',
+ 'video/payload_router.cc',
+ 'video/payload_router.h',
'video/receive_statistics_proxy.cc',
'video/receive_statistics_proxy.h',
+ 'video/report_block_stats.cc',
+ 'video/report_block_stats.h',
'video/send_statistics_proxy.cc',
'video/send_statistics_proxy.h',
+ 'video/stream_synchronization.cc',
+ 'video/stream_synchronization.h',
'video/video_capture_input.cc',
'video/video_capture_input.h',
'video/video_decoder.cc',
@@ -38,29 +50,16 @@
'video/video_receive_stream.h',
'video/video_send_stream.cc',
'video/video_send_stream.h',
- 'video_engine/call_stats.cc',
- 'video_engine/call_stats.h',
- 'video_engine/encoder_state_feedback.cc',
- 'video_engine/encoder_state_feedback.h',
- 'video_engine/overuse_frame_detector.cc',
- 'video_engine/overuse_frame_detector.h',
- 'video_engine/payload_router.cc',
- 'video_engine/payload_router.h',
- 'video_engine/report_block_stats.cc',
- 'video_engine/report_block_stats.h',
- 'video_engine/stream_synchronization.cc',
- 'video_engine/stream_synchronization.h',
- 'video_engine/vie_channel.cc',
- 'video_engine/vie_channel.h',
- 'video_engine/vie_defines.h',
- 'video_engine/vie_encoder.cc',
- 'video_engine/vie_encoder.h',
- 'video_engine/vie_receiver.cc',
- 'video_engine/vie_receiver.h',
- 'video_engine/vie_remb.cc',
- 'video_engine/vie_remb.h',
- 'video_engine/vie_sync_module.cc',
- 'video_engine/vie_sync_module.h',
+ 'video/vie_channel.cc',
+ 'video/vie_channel.h',
+ 'video/vie_encoder.cc',
+ 'video/vie_encoder.h',
+ 'video/vie_receiver.cc',
+ 'video/vie_receiver.h',
+ 'video/vie_remb.cc',
+ 'video/vie_remb.h',
+ 'video/vie_sync_module.cc',
+ 'video/vie_sync_module.h',
],
},
}