aboutsummaryrefslogtreecommitdiff
path: root/webrtc/video_engine
diff options
context:
space:
mode:
authorChih-hung Hsieh <chh@google.com>2015-12-01 17:07:48 +0000
committerandroid-build-merger <android-build-merger@google.com>2015-12-01 17:07:48 +0000
commita4acd9d6bc9b3b033d7d274316e75ee067df8d20 (patch)
tree672a185b294789cf991f385c3e395dd63bea9063 /webrtc/video_engine
parent3681b90ba4fe7a27232dd3e27897d5d7ed9d651c (diff)
parentfe8b4a657979b49e1701bd92f6d5814a99e0b2be (diff)
downloadwebrtc-a4acd9d6bc9b3b033d7d274316e75ee067df8d20.tar.gz
Merge changes I7bbf776e,I1b827825
am: fe8b4a6579 * commit 'fe8b4a657979b49e1701bd92f6d5814a99e0b2be': (7237 commits) WIP: Changes after merge commit 'cb3f9bd' Make the nonlinear beamformer steerable Utilize bitrate above codec max to protect video. Enable VP9 internal resize by default. Filter overlapping RTP header extensions. Make VCMEncodedFrameCallback const. MediaCodecVideoEncoder: Add number of quality resolution downscales to Encoded callback. Remove redudant encoder rate calls. Create isolate files for nonparallel tests. Register header extensions in RtpRtcpObserver to avoid log spam. Make an enum class out of NetEqDecoder, and hide the neteq_decoders_ table ACM: Move NACK functionality inside NetEq Fix chromium-style warnings in webrtc/sound/. Create a 'webrtc_nonparallel_tests' target. Update scalability structure data according to updates in the RTP payload profile. audio_coding: rename interface -> include Rewrote perform_action_on_all_files to be parallell. Update reference indices according to updates in the RTP payload profile. Disable P2PTransport...TestFailoverControlledSide on Memcheck pass clangcl compile options to ignore warnings in gflags.cc ...
Diffstat (limited to 'webrtc/video_engine')
-rw-r--r--webrtc/video_engine/OWNERS13
-rw-r--r--webrtc/video_engine/call_stats.cc167
-rw-r--r--webrtc/video_engine/call_stats.h81
-rw-r--r--webrtc/video_engine/call_stats_unittest.cc203
-rw-r--r--webrtc/video_engine/encoder_state_feedback.cc124
-rw-r--r--webrtc/video_engine/encoder_state_feedback.h71
-rw-r--r--webrtc/video_engine/encoder_state_feedback_unittest.cc143
-rw-r--r--webrtc/video_engine/overuse_frame_detector.cc422
-rw-r--r--webrtc/video_engine/overuse_frame_detector.h181
-rw-r--r--webrtc/video_engine/overuse_frame_detector_unittest.cc405
-rw-r--r--webrtc/video_engine/payload_router.cc101
-rw-r--r--webrtc/video_engine/payload_router.h85
-rw-r--r--webrtc/video_engine/payload_router_unittest.cc209
-rw-r--r--webrtc/video_engine/report_block_stats.cc111
-rw-r--r--webrtc/video_engine/report_block_stats.h62
-rw-r--r--webrtc/video_engine/report_block_stats_unittest.cc146
-rw-r--r--webrtc/video_engine/stream_synchronization.cc226
-rw-r--r--webrtc/video_engine/stream_synchronization.h59
-rw-r--r--webrtc/video_engine/stream_synchronization_unittest.cc562
-rw-r--r--webrtc/video_engine/video_engine_core_unittests.gyp74
-rw-r--r--webrtc/video_engine/video_engine_core_unittests.isolate23
-rw-r--r--webrtc/video_engine/vie_channel.cc1253
-rw-r--r--webrtc/video_engine/vie_channel.h458
-rw-r--r--webrtc/video_engine/vie_codec_unittest.cc230
-rw-r--r--webrtc/video_engine/vie_defines.h120
-rw-r--r--webrtc/video_engine/vie_encoder.cc710
-rw-r--r--webrtc/video_engine/vie_encoder.h201
-rw-r--r--webrtc/video_engine/vie_receiver.cc482
-rw-r--r--webrtc/video_engine/vie_receiver.h131
-rw-r--r--webrtc/video_engine/vie_remb.cc143
-rw-r--r--webrtc/video_engine/vie_remb.h78
-rw-r--r--webrtc/video_engine/vie_remb_unittest.cc251
-rw-r--r--webrtc/video_engine/vie_sync_module.cc188
-rw-r--r--webrtc/video_engine/vie_sync_module.h65
34 files changed, 7778 insertions, 0 deletions
diff --git a/webrtc/video_engine/OWNERS b/webrtc/video_engine/OWNERS
new file mode 100644
index 0000000000..a8201f0252
--- /dev/null
+++ b/webrtc/video_engine/OWNERS
@@ -0,0 +1,13 @@
+mflodman@webrtc.org
+pbos@webrtc.org
+stefan@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
+per-file BUILD.gn=kjellander@webrtc.org
+
diff --git a/webrtc/video_engine/call_stats.cc b/webrtc/video_engine/call_stats.cc
new file mode 100644
index 0000000000..0b71cc346c
--- /dev/null
+++ b/webrtc/video_engine/call_stats.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/call_stats.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc {
+namespace {
+// Time interval for updating the observers.
+const int64_t kUpdateIntervalMs = 1000;
+// Weight factor to apply to the average rtt.
+const float kWeightFactor = 0.3f;
+
+void RemoveOldReports(int64_t now, std::list<CallStats::RttTime>* reports) {
+ // A rtt report is considered valid for this long.
+ const int64_t kRttTimeoutMs = 1500;
+ while (!reports->empty() &&
+ (now - reports->front().time) > kRttTimeoutMs) {
+ reports->pop_front();
+ }
+}
+
+int64_t GetMaxRttMs(std::list<CallStats::RttTime>* reports) {
+ int64_t max_rtt_ms = 0;
+ for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
+ it != reports->end(); ++it) {
+ max_rtt_ms = std::max(it->rtt, max_rtt_ms);
+ }
+ return max_rtt_ms;
+}
+
+int64_t GetAvgRttMs(std::list<CallStats::RttTime>* reports) {
+ if (reports->empty()) {
+ return 0;
+ }
+ int64_t sum = 0;
+ for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
+ it != reports->end(); ++it) {
+ sum += it->rtt;
+ }
+ return sum / reports->size();
+}
+
+void UpdateAvgRttMs(std::list<CallStats::RttTime>* reports, int64_t* avg_rtt) {
+ uint32_t cur_rtt_ms = GetAvgRttMs(reports);
+ if (cur_rtt_ms == 0) {
+ // Reset.
+ *avg_rtt = 0;
+ return;
+ }
+ if (*avg_rtt == 0) {
+ // Initialize.
+ *avg_rtt = cur_rtt_ms;
+ return;
+ }
+ *avg_rtt = *avg_rtt * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor;
+}
+} // namespace
+
+class RtcpObserver : public RtcpRttStats {
+ public:
+ explicit RtcpObserver(CallStats* owner) : owner_(owner) {}
+ virtual ~RtcpObserver() {}
+
+ virtual void OnRttUpdate(int64_t rtt) {
+ owner_->OnRttUpdate(rtt);
+ }
+
+ // Returns the average RTT.
+ virtual int64_t LastProcessedRtt() const {
+ return owner_->avg_rtt_ms();
+ }
+
+ private:
+ CallStats* owner_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(RtcpObserver);
+};
+
+CallStats::CallStats()
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ rtcp_rtt_stats_(new RtcpObserver(this)),
+ last_process_time_(TickTime::MillisecondTimestamp()),
+ max_rtt_ms_(0),
+ avg_rtt_ms_(0) {
+}
+
+CallStats::~CallStats() {
+ assert(observers_.empty());
+}
+
+int64_t CallStats::TimeUntilNextProcess() {
+ return last_process_time_ + kUpdateIntervalMs -
+ TickTime::MillisecondTimestamp();
+}
+
+int32_t CallStats::Process() {
+ CriticalSectionScoped cs(crit_.get());
+ int64_t now = TickTime::MillisecondTimestamp();
+ if (now < last_process_time_ + kUpdateIntervalMs)
+ return 0;
+
+ last_process_time_ = now;
+
+ RemoveOldReports(now, &reports_);
+ max_rtt_ms_ = GetMaxRttMs(&reports_);
+ UpdateAvgRttMs(&reports_, &avg_rtt_ms_);
+
+ // If there is a valid rtt, update all observers with the max rtt.
+ // TODO(asapersson): Consider changing this to report the average rtt.
+ if (max_rtt_ms_ > 0) {
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ (*it)->OnRttUpdate(avg_rtt_ms_, max_rtt_ms_);
+ }
+ }
+ return 0;
+}
+
+int64_t CallStats::avg_rtt_ms() const {
+ CriticalSectionScoped cs(crit_.get());
+ return avg_rtt_ms_;
+}
+
+RtcpRttStats* CallStats::rtcp_rtt_stats() const {
+ return rtcp_rtt_stats_.get();
+}
+
+void CallStats::RegisterStatsObserver(CallStatsObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ if (*it == observer)
+ return;
+ }
+ observers_.push_back(observer);
+}
+
+void CallStats::DeregisterStatsObserver(CallStatsObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ if (*it == observer) {
+ observers_.erase(it);
+ return;
+ }
+ }
+}
+
+void CallStats::OnRttUpdate(int64_t rtt) {
+ CriticalSectionScoped cs(crit_.get());
+ reports_.push_back(RttTime(rtt, TickTime::MillisecondTimestamp()));
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/call_stats.h b/webrtc/video_engine/call_stats.h
new file mode 100644
index 0000000000..a17330a7c1
--- /dev/null
+++ b/webrtc/video_engine/call_stats.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_CALL_STATS_H_
+#define WEBRTC_VIDEO_ENGINE_CALL_STATS_H_
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/interface/module.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class CriticalSectionWrapper;
+class RtcpRttStats;
+
+// CallStats keeps track of statistics for a call.
+class CallStats : public Module {
+ public:
+ friend class RtcpObserver;
+
+ CallStats();
+ ~CallStats();
+
+ // Implements Module, to use the process thread.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ // Returns a RtcpRttStats to register at a statistics provider. The object
+ // has the same lifetime as the CallStats instance.
+ RtcpRttStats* rtcp_rtt_stats() const;
+
+ // Registers/deregisters a new observer to receive statistics updates.
+ void RegisterStatsObserver(CallStatsObserver* observer);
+ void DeregisterStatsObserver(CallStatsObserver* observer);
+
+ // Helper struct keeping track of the time a rtt value is reported.
+ struct RttTime {
+ RttTime(int64_t new_rtt, int64_t rtt_time)
+ : rtt(new_rtt), time(rtt_time) {}
+ const int64_t rtt;
+ const int64_t time;
+ };
+
+ protected:
+ void OnRttUpdate(int64_t rtt);
+
+ int64_t avg_rtt_ms() const;
+
+ private:
+ // Protecting all members.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ // Observer receiving statistics updates.
+ rtc::scoped_ptr<RtcpRttStats> rtcp_rtt_stats_;
+  // The last time 'Process' resulted in a statistics update.
+ int64_t last_process_time_;
+ // The last RTT in the statistics update (zero if there is no valid estimate).
+ int64_t max_rtt_ms_;
+ int64_t avg_rtt_ms_;
+
+ // All Rtt reports within valid time interval, oldest first.
+ std::list<RttTime> reports_;
+
+ // Observers getting stats reports.
+ std::list<CallStatsObserver*> observers_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(CallStats);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_CALL_STATS_H_
diff --git a/webrtc/video_engine/call_stats_unittest.cc b/webrtc/video_engine/call_stats_unittest.cc
new file mode 100644
index 0000000000..4fb88df338
--- /dev/null
+++ b/webrtc/video_engine/call_stats_unittest.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video_engine/call_stats.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::Return;
+
+namespace webrtc {
+
+class MockStatsObserver : public CallStatsObserver {
+ public:
+ MockStatsObserver() {}
+ virtual ~MockStatsObserver() {}
+
+ MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+};
+
+class CallStatsTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ TickTime::UseFakeClock(12345);
+ call_stats_.reset(new CallStats());
+ }
+ rtc::scoped_ptr<CallStats> call_stats_;
+};
+
+TEST_F(CallStatsTest, AddAndTriggerCallback) {
+ MockStatsObserver stats_observer;
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
+
+ const int64_t kRtt = 25;
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ const int64_t kRttTimeOutMs = 1500 + 10;
+ TickTime::AdvanceFakeClock(kRttTimeOutMs);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+ EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+TEST_F(CallStatsTest, ProcessTime) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ rtcp_rtt_stats->OnRttUpdate(100);
+
+ // Time isn't updated yet.
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+
+ // Advance clock and verify we get an update.
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
+ call_stats_->Process();
+
+ // Advance clock just too little to get an update.
+ TickTime::AdvanceFakeClock(999);
+ rtcp_rtt_stats->OnRttUpdate(100);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+
+ // Advance enough to trigger a new update.
+ TickTime::AdvanceFakeClock(1);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
+ call_stats_->Process();
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+// Verify all observers get correct estimates and observers can be added and
+// removed.
+TEST_F(CallStatsTest, MultipleObservers) {
+ MockStatsObserver stats_observer_1;
+ call_stats_->RegisterStatsObserver(&stats_observer_1);
+ // Add the second observer twice, there should still be only one report to the
+ // observer.
+ MockStatsObserver stats_observer_2;
+ call_stats_->RegisterStatsObserver(&stats_observer_2);
+ call_stats_->RegisterStatsObserver(&stats_observer_2);
+
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ const int64_t kRtt = 100;
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+
+ // Verify both observers are updated.
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(1);
+ call_stats_->Process();
+
+ // Deregister the second observer and verify update is only sent to the first
+ // observer.
+ call_stats_->DeregisterStatsObserver(&stats_observer_2);
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
+ call_stats_->Process();
+
+ // Deregister the first observer.
+ call_stats_->DeregisterStatsObserver(&stats_observer_1);
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(0);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
+ call_stats_->Process();
+}
+
+// Verify increasing and decreasing rtt triggers callbacks with correct values.
+TEST_F(CallStatsTest, ChangeRtt) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+
+ // Advance clock to be ready for an update.
+ TickTime::AdvanceFakeClock(1000);
+
+ // Set a first value and verify the callback is triggered.
+ const int64_t kFirstRtt = 100;
+ rtcp_rtt_stats->OnRttUpdate(kFirstRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kFirstRtt, kFirstRtt)).Times(1);
+ call_stats_->Process();
+
+ // Increase rtt and verify the new value is reported.
+ TickTime::AdvanceFakeClock(1000);
+ const int64_t kHighRtt = kFirstRtt + 20;
+ const int64_t kAvgRtt1 = 103;
+ rtcp_rtt_stats->OnRttUpdate(kHighRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt1, kHighRtt)).Times(1);
+ call_stats_->Process();
+
+ // Increase time enough for a new update, but not too much to make the
+ // rtt invalid. Report a lower rtt and verify the old/high value still is sent
+ // in the callback.
+ TickTime::AdvanceFakeClock(1000);
+ const int64_t kLowRtt = kFirstRtt - 20;
+ const int64_t kAvgRtt2 = 102;
+ rtcp_rtt_stats->OnRttUpdate(kLowRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt2, kHighRtt)).Times(1);
+ call_stats_->Process();
+
+ // Advance time to make the high report invalid, the lower rtt should now be
+ // in the callback.
+ TickTime::AdvanceFakeClock(1000);
+ const int64_t kAvgRtt3 = 95;
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt3, kLowRtt)).Times(1);
+ call_stats_->Process();
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+TEST_F(CallStatsTest, LastProcessedRtt) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ TickTime::AdvanceFakeClock(1000);
+
+  // Set initial values and verify that LastProcessedRtt initially returns the
+ // average rtt.
+ const int64_t kRttLow = 10;
+ const int64_t kRttHigh = 30;
+ const int64_t kAvgRtt = 20;
+ rtcp_rtt_stats->OnRttUpdate(kRttLow);
+ rtcp_rtt_stats->OnRttUpdate(kRttHigh);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ // Update values and verify LastProcessedRtt.
+ TickTime::AdvanceFakeClock(1000);
+ rtcp_rtt_stats->OnRttUpdate(kRttLow);
+ rtcp_rtt_stats->OnRttUpdate(kRttHigh);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/encoder_state_feedback.cc b/webrtc/video_engine/encoder_state_feedback.cc
new file mode 100644
index 0000000000..1c376b2820
--- /dev/null
+++ b/webrtc/video_engine/encoder_state_feedback.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/encoder_state_feedback.h"
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/video_engine/vie_encoder.h"
+
+namespace webrtc {
+
+// Helper class registered at the RTP module relaying callbacks to
+// EncoderStateFeedback.
+class EncoderStateFeedbackObserver : public RtcpIntraFrameObserver {
+ public:
+ explicit EncoderStateFeedbackObserver(EncoderStateFeedback* owner)
+ : owner_(owner) {}
+ ~EncoderStateFeedbackObserver() {}
+
+ // Implements RtcpIntraFrameObserver.
+ virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ owner_->OnReceivedIntraFrameRequest(ssrc);
+ }
+ virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
+ owner_->OnReceivedSLI(ssrc, picture_id);
+ }
+ virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
+ owner_->OnReceivedRPSI(ssrc, picture_id);
+ }
+
+ virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+ owner_->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+ }
+
+ private:
+ EncoderStateFeedback* owner_;
+};
+
+EncoderStateFeedback::EncoderStateFeedback()
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ observer_(new EncoderStateFeedbackObserver(this)) {}
+
+EncoderStateFeedback::~EncoderStateFeedback() {
+ assert(encoders_.empty());
+}
+
+void EncoderStateFeedback::AddEncoder(const std::vector<uint32_t>& ssrcs,
+ ViEEncoder* encoder) {
+ RTC_DCHECK(!ssrcs.empty());
+ CriticalSectionScoped lock(crit_.get());
+ for (uint32_t ssrc : ssrcs) {
+ RTC_DCHECK(encoders_.find(ssrc) == encoders_.end());
+ encoders_[ssrc] = encoder;
+ }
+}
+
+void EncoderStateFeedback::RemoveEncoder(const ViEEncoder* encoder) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.begin();
+ while (it != encoders_.end()) {
+ if (it->second == encoder) {
+ encoders_.erase(it++);
+ } else {
+ ++it;
+ }
+ }
+}
+
+RtcpIntraFrameObserver* EncoderStateFeedback::GetRtcpIntraFrameObserver() {
+ return observer_.get();
+}
+
+void EncoderStateFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedIntraFrameRequest(ssrc);
+}
+
+void EncoderStateFeedback::OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedSLI(ssrc, picture_id);
+}
+
+void EncoderStateFeedback::OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedRPSI(ssrc, picture_id);
+}
+
+void EncoderStateFeedback::OnLocalSsrcChanged(uint32_t old_ssrc,
+ uint32_t new_ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(old_ssrc);
+ if (it == encoders_.end() || encoders_.find(new_ssrc) != encoders_.end()) {
+ return;
+ }
+
+ ViEEncoder* encoder = it->second;
+ encoders_.erase(it);
+ encoders_[new_ssrc] = encoder;
+ encoder->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/encoder_state_feedback.h b/webrtc/video_engine/encoder_state_feedback.h
new file mode 100644
index 0000000000..51e9111dfe
--- /dev/null
+++ b/webrtc/video_engine/encoder_state_feedback.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(mflodman) ViEEncoder has a time check to not send key frames too often,
+// move the logic to this class.
+
+#ifndef WEBRTC_VIDEO_ENGINE_ENCODER_STATE_FEEDBACK_H_
+#define WEBRTC_VIDEO_ENGINE_ENCODER_STATE_FEEDBACK_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EncoderStateFeedbackObserver;
+class RtcpIntraFrameObserver;
+class ViEEncoder;
+
+class EncoderStateFeedback {
+ public:
+ friend class EncoderStateFeedbackObserver;
+
+ EncoderStateFeedback();
+ ~EncoderStateFeedback();
+
+ // Adds an encoder to receive feedback for a set of SSRCs.
+ void AddEncoder(const std::vector<uint32_t>& ssrc, ViEEncoder* encoder);
+
+ // Removes a registered ViEEncoder.
+ void RemoveEncoder(const ViEEncoder* encoder);
+
+ // Returns an observer to register at the requesting class. The observer has
+ // the same lifetime as the EncoderStateFeedback instance.
+ RtcpIntraFrameObserver* GetRtcpIntraFrameObserver();
+
+ protected:
+ // Called by EncoderStateFeedbackObserver when a new key frame is requested.
+ void OnReceivedIntraFrameRequest(uint32_t ssrc);
+ void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
+ void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
+ void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc);
+
+ private:
+ typedef std::map<uint32_t, ViEEncoder*> SsrcEncoderMap;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Instance registered at the class requesting new key frames.
+ rtc::scoped_ptr<EncoderStateFeedbackObserver> observer_;
+
+ // Maps a unique ssrc to the given encoder.
+ SsrcEncoderMap encoders_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(EncoderStateFeedback);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_ENCODER_STATE_FEEDBACK_H_
diff --git a/webrtc/video_engine/encoder_state_feedback_unittest.cc b/webrtc/video_engine/encoder_state_feedback_unittest.cc
new file mode 100644
index 0000000000..9787acc144
--- /dev/null
+++ b/webrtc/video_engine/encoder_state_feedback_unittest.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for EncoderStateFeedback.
+#include "webrtc/video_engine/encoder_state_feedback.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/video_engine/payload_router.h"
+#include "webrtc/video_engine/vie_encoder.h"
+
+using ::testing::NiceMock;
+
+namespace webrtc {
+
+class MockVieEncoder : public ViEEncoder {
+ public:
+ explicit MockVieEncoder(ProcessThread* process_thread, PacedSender* pacer)
+ : ViEEncoder(1, process_thread, nullptr, nullptr, pacer, nullptr) {}
+ ~MockVieEncoder() {}
+
+ MOCK_METHOD1(OnReceivedIntraFrameRequest,
+ void(uint32_t));
+ MOCK_METHOD2(OnReceivedSLI,
+ void(uint32_t ssrc, uint8_t picture_id));
+ MOCK_METHOD2(OnReceivedRPSI,
+ void(uint32_t ssrc, uint64_t picture_id));
+ MOCK_METHOD2(OnLocalSsrcChanged,
+ void(uint32_t old_ssrc, uint32_t new_ssrc));
+};
+
+class VieKeyRequestTest : public ::testing::Test {
+ protected:
+ VieKeyRequestTest()
+ : pacer_(Clock::GetRealTimeClock(),
+ &router_,
+ BitrateController::kDefaultStartBitrateKbps,
+ PacedSender::kDefaultPaceMultiplier *
+ BitrateController::kDefaultStartBitrateKbps,
+ 0) {}
+ virtual void SetUp() {
+ process_thread_.reset(new NiceMock<MockProcessThread>);
+ encoder_state_feedback_.reset(new EncoderStateFeedback());
+ }
+ rtc::scoped_ptr<MockProcessThread> process_thread_;
+ rtc::scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
+ PacketRouter router_;
+ PacedSender pacer_;
+};
+
+TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) {
+ const int ssrc = 1234;
+ MockVieEncoder encoder(process_thread_.get(), &pacer_);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc), &encoder);
+
+ EXPECT_CALL(encoder, OnReceivedIntraFrameRequest(ssrc))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc);
+
+ const uint8_t sli_picture_id = 3;
+ EXPECT_CALL(encoder, OnReceivedSLI(ssrc, sli_picture_id))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc, sli_picture_id);
+
+ const uint64_t rpsi_picture_id = 9;
+ EXPECT_CALL(encoder, OnReceivedRPSI(ssrc, rpsi_picture_id))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc, rpsi_picture_id);
+
+ encoder_state_feedback_->RemoveEncoder(&encoder);
+}
+
+// Register multiple encoders and make sure the request is relayed to correct
+// ViEEncoder.
+TEST_F(VieKeyRequestTest, MultipleEncoders) {
+ const int ssrc_1 = 1234;
+ const int ssrc_2 = 5678;
+ MockVieEncoder encoder_1(process_thread_.get(), &pacer_);
+ MockVieEncoder encoder_2(process_thread_.get(), &pacer_);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_1),
+ &encoder_1);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_2),
+ &encoder_2);
+
+ EXPECT_CALL(encoder_1, OnReceivedIntraFrameRequest(ssrc_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_2);
+
+ const uint8_t sli_pid_1 = 3;
+ const uint8_t sli_pid_2 = 4;
+ EXPECT_CALL(encoder_1, OnReceivedSLI(ssrc_1, sli_pid_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedSLI(ssrc_2, sli_pid_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc_1, sli_pid_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc_2, sli_pid_2);
+
+ const uint64_t rpsi_pid_1 = 9;
+ const uint64_t rpsi_pid_2 = 10;
+ EXPECT_CALL(encoder_1, OnReceivedRPSI(ssrc_1, rpsi_pid_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedRPSI(ssrc_2, rpsi_pid_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc_1, rpsi_pid_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc_2, rpsi_pid_2);
+
+ encoder_state_feedback_->RemoveEncoder(&encoder_1);
+ EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_2);
+ encoder_state_feedback_->RemoveEncoder(&encoder_2);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/overuse_frame_detector.cc b/webrtc/video_engine/overuse_frame_detector.cc
new file mode 100644
index 0000000000..47a6e496b9
--- /dev/null
+++ b/webrtc/video_engine/overuse_frame_detector.cc
@@ -0,0 +1,422 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/overuse_frame_detector.h"
+
+#include <assert.h>
+#include <math.h>
+
+#include <algorithm>
+#include <list>
+#include <map>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+namespace {
+// Interval between successive Process() evaluations of the overuse state.
+const int64_t kProcessIntervalMs = 5000;
+
+// Delay between consecutive rampups. (Used for quick recovery.)
+const int kQuickRampUpDelayMs = 10 * 1000;
+// Delay between rampup attempts. Initially uses standard, scales up to max.
+const int kStandardRampUpDelayMs = 40 * 1000;
+const int kMaxRampUpDelayMs = 240 * 1000;
+// Exponential back-off factor, to prevent annoying up-down behaviour.
+const double kRampUpBackoffFactor = 2.0;
+
+// Max number of overuses detected before always applying the rampup delay.
+const int kMaxOverusesBeforeApplyRampupDelay = 4;
+
+// Nominal sample interval used to normalize the exp-filter exponents below.
+const float kSampleDiffMs = 33.0f;
+// The maximum exponent to use in VCMExpFilter.
+const float kMaxExp = 7.0f;
+
+}  // namespace
+
+// Class for calculating the average encode time.
+class OveruseFrameDetector::EncodeTimeAvg {
+ public:
+  EncodeTimeAvg()
+      : kWeightFactor(0.5f),
+        kInitialAvgEncodeTimeMs(5.0f),
+        filtered_encode_time_ms_(new rtc::ExpFilter(kWeightFactor)) {
+    // Seed the filter so Value() is meaningful before any real sample.
+    filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
+  }
+  ~EncodeTimeAvg() {}
+
+  // Adds one encode-time sample. |diff_last_sample_ms| is the time since the
+  // previous sample; it scales the filter exponent (capped at kMaxExp).
+  void AddSample(float encode_time_ms, int64_t diff_last_sample_ms) {
+    float exp = diff_last_sample_ms / kSampleDiffMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_encode_time_ms_->Apply(exp, encode_time_ms);
+  }
+
+  // Returns the filtered average encode time, rounded to the nearest ms.
+  int Value() const {
+    return static_cast<int>(filtered_encode_time_ms_->filtered() + 0.5);
+  }
+
+ private:
+  const float kWeightFactor;
+  const float kInitialAvgEncodeTimeMs;
+  rtc::scoped_ptr<rtc::ExpFilter> filtered_encode_time_ms_;
+};
+
+// Class for calculating the processing usage on the send-side (the average
+// processing time of a frame divided by the average time difference between
+// captured frames).
+class OveruseFrameDetector::SendProcessingUsage {
+ public:
+  explicit SendProcessingUsage(const CpuOveruseOptions& options)
+      : kWeightFactorFrameDiff(0.998f),
+        kWeightFactorProcessing(0.995f),
+        kInitialSampleDiffMs(40.0f),
+        kMaxSampleDiffMs(45.0f),
+        count_(0),
+        options_(options),
+        filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
+        filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
+    Reset();
+  }
+  ~SendProcessingUsage() {}
+
+  // Re-seeds both filters with initial estimates. Called on construction and
+  // by the detector after a resolution change or frame timeout.
+  void Reset() {
+    count_ = 0;
+    filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
+    filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
+    filtered_processing_ms_->Reset(kWeightFactorProcessing);
+    filtered_processing_ms_->Apply(1.0f, InitialProcessingMs());
+  }
+
+  // Adds the time difference (ms) between two consecutive captured frames.
+  void AddCaptureSample(float sample_ms) {
+    float exp = sample_ms / kSampleDiffMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_frame_diff_ms_->Apply(exp, sample_ms);
+  }
+
+  // Adds one processing-time sample. |diff_last_sample_ms| scales the filter
+  // exponent (capped at kMaxExp).
+  void AddSample(float processing_ms, int64_t diff_last_sample_ms) {
+    ++count_;
+    float exp = diff_last_sample_ms / kSampleDiffMs;
+    exp = std::min(exp, kMaxExp);
+    filtered_processing_ms_->Apply(exp, processing_ms);
+  }
+
+  // Returns the usage in percent: average processing time divided by the
+  // average (clamped) frame interval. Until min_frame_samples samples have
+  // been added, the initial mid-threshold estimate is returned.
+  int Value() const {
+    if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
+      return static_cast<int>(InitialUsageInPercent() + 0.5f);
+    }
+    float frame_diff_ms = std::max(filtered_frame_diff_ms_->filtered(), 1.0f);
+    frame_diff_ms = std::min(frame_diff_ms, kMaxSampleDiffMs);
+    float encode_usage_percent =
+        100.0f * filtered_processing_ms_->filtered() / frame_diff_ms;
+    return static_cast<int>(encode_usage_percent + 0.5);
+  }
+
+ private:
+  float InitialUsageInPercent() const {
+    // Start in between the underuse and overuse threshold.
+    return (options_.low_encode_usage_threshold_percent +
+            options_.high_encode_usage_threshold_percent) / 2.0f;
+  }
+
+  // Initial processing time consistent with the initial usage estimate and
+  // the initial sample interval.
+  float InitialProcessingMs() const {
+    return InitialUsageInPercent() * kInitialSampleDiffMs / 100;
+  }
+
+  const float kWeightFactorFrameDiff;
+  const float kWeightFactorProcessing;
+  const float kInitialSampleDiffMs;
+  const float kMaxSampleDiffMs;
+  uint64_t count_;
+  const CpuOveruseOptions options_;
+  rtc::scoped_ptr<rtc::ExpFilter> filtered_processing_ms_;
+  rtc::scoped_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
+};
+
+// Class for calculating the processing time of frames.
+class OveruseFrameDetector::FrameQueue {
+ public:
+  FrameQueue() : last_processing_time_ms_(-1) {}
+  ~FrameQueue() {}
+
+  // Called when a frame is captured.
+  // Starts the measuring of the processing time of the frame.
+  void Start(int64_t capture_time, int64_t now) {
+    const size_t kMaxSize = 90;  // Allows for processing time of 1.5s at 60fps.
+    // NOTE(review): the strict '>' lets the map grow to kMaxSize + 1 entries
+    // before evicting the oldest; the unit test pins the size at 91, so
+    // changing this to '>=' would be a behavior change — confirm intent.
+    if (frame_times_.size() > kMaxSize) {
+      LOG(LS_WARNING) << "Max size reached, removed oldest frame.";
+      frame_times_.erase(frame_times_.begin());
+    }
+    if (frame_times_.find(capture_time) != frame_times_.end()) {
+      // Frame should not exist.
+      assert(false);
+      return;
+    }
+    frame_times_[capture_time] = now;
+  }
+
+  // Called when the processing of a frame has finished.
+  // Returns the processing time of the frame, or -1 if |capture_time| was
+  // never registered via Start() (e.g. unknown or already evicted).
+  int End(int64_t capture_time, int64_t now) {
+    std::map<int64_t, int64_t>::iterator it = frame_times_.find(capture_time);
+    if (it == frame_times_.end()) {
+      return -1;
+    }
+    // Remove any old frames up to current.
+    // Old frames have been skipped by the capture process thread.
+    // TODO(asapersson): Consider measuring time from first frame in list.
+    last_processing_time_ms_ = now - (*it).second;
+    frame_times_.erase(frame_times_.begin(), ++it);
+    return last_processing_time_ms_;
+  }
+
+  void Reset() { frame_times_.clear(); }
+  int NumFrames() const { return static_cast<int>(frame_times_.size()); }
+  int last_processing_time_ms() const { return last_processing_time_ms_; }
+
+ private:
+  // Captured frames mapped by the capture time.
+  std::map<int64_t, int64_t> frame_times_;
+  int last_processing_time_ms_;
+};
+
+
+OveruseFrameDetector::OveruseFrameDetector(
+    Clock* clock,
+    const CpuOveruseOptions& options,
+    CpuOveruseObserver* observer,
+    CpuOveruseMetricsObserver* metrics_observer)
+    : options_(options),
+      observer_(observer),  // May be null; overuse callbacks then skipped.
+      metrics_observer_(metrics_observer),
+      clock_(clock),
+      num_process_times_(0),
+      last_capture_time_(0),
+      num_pixels_(0),
+      next_process_time_(clock_->TimeInMilliseconds()),
+      last_overuse_time_(0),
+      checks_above_threshold_(0),
+      num_overuse_detections_(0),
+      last_rampup_time_(0),
+      in_quick_rampup_(false),
+      current_rampup_delay_ms_(kStandardRampUpDelayMs),
+      last_encode_sample_ms_(0),
+      last_sample_time_ms_(0),
+      encode_time_(new EncodeTimeAvg()),
+      usage_(new SendProcessingUsage(options)),
+      frame_queue_(new FrameQueue()) {
+  RTC_DCHECK(metrics_observer != nullptr);
+  // Make sure stats are initially up-to-date. This simplifies unit testing
+  // since we don't have to trigger an update using one of the methods which
+  // would also alter the overuse state.
+  UpdateCpuOveruseMetrics();
+  // Process()/TimeUntilNextProcess() may run on a different thread than the
+  // constructing one; the checker re-binds on first use after detaching.
+  processing_thread_.DetachFromThread();
+}
+
+OveruseFrameDetector::~OveruseFrameDetector() {
+}
+
+// Test accessor: processing time of the most recently finished frame.
+int OveruseFrameDetector::LastProcessingTimeMs() const {
+  rtc::CritScope cs(&crit_);
+  return frame_queue_->last_processing_time_ms();
+}
+
+// Test accessor: number of captured-but-not-yet-sent frames being tracked.
+int OveruseFrameDetector::FramesInQueue() const {
+  rtc::CritScope cs(&crit_);
+  return frame_queue_->NumFrames();
+}
+
+// Snapshots the current filtered averages into metrics_ and notifies the
+// metrics observer. Requires crit_ to be held (see header annotation).
+void OveruseFrameDetector::UpdateCpuOveruseMetrics() {
+  metrics_.avg_encode_time_ms = encode_time_->Value();
+  metrics_.encode_usage_percent = usage_->Value();
+
+  metrics_observer_->CpuOveruseMetricsUpdated(metrics_);
+}
+
+// Implements Module. Called on the processing thread only.
+int64_t OveruseFrameDetector::TimeUntilNextProcess() {
+  RTC_DCHECK(processing_thread_.CalledOnValidThread());
+  return next_process_time_ - clock_->TimeInMilliseconds();
+}
+
+// Returns true if the captured frame resolution (in pixels) differs from the
+// previous frame's.
+// NOTE(review): could be simplified to 'return num_pixels != num_pixels_;'.
+bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
+  if (num_pixels != num_pixels_) {
+    return true;
+  }
+  return false;
+}
+
+// Returns true if more than frame_timeout_interval_ms has passed since the
+// last captured frame. Never true before the first frame is captured
+// (last_capture_time_ == 0 is the "no frame yet" sentinel).
+bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now) const {
+  if (last_capture_time_ == 0) {
+    return false;
+  }
+  return (now - last_capture_time_) > options_.frame_timeout_interval_ms;
+}
+
+// Resets the usage estimator and frame bookkeeping for a new resolution or
+// after a frame timeout. The rampup/overuse state (processing-thread members)
+// is intentionally kept.
+void OveruseFrameDetector::ResetAll(int num_pixels) {
+  num_pixels_ = num_pixels;
+  usage_->Reset();
+  frame_queue_->Reset();
+  last_capture_time_ = 0;
+  num_process_times_ = 0;
+  UpdateCpuOveruseMetrics();
+}
+
+void OveruseFrameDetector::FrameCaptured(int width,
+                                         int height,
+                                         int64_t capture_time_ms) {
+  rtc::CritScope cs(&crit_);
+
+  int64_t now = clock_->TimeInMilliseconds();
+  if (FrameSizeChanged(width * height) || FrameTimeoutDetected(now)) {
+    ResetAll(width * height);
+  }
+
+  // Feed the capture-interval filter (skipped for the very first frame).
+  if (last_capture_time_ != 0)
+    usage_->AddCaptureSample(now - last_capture_time_);
+
+  last_capture_time_ = now;
+
+  // In extended mode, measure capture -> sent time via the frame queue.
+  if (options_.enable_extended_processing_usage) {
+    frame_queue_->Start(capture_time_ms, now);
+  }
+}
+
+void OveruseFrameDetector::FrameEncoded(int encode_time_ms) {
+  rtc::CritScope cs(&crit_);
+  int64_t now = clock_->TimeInMilliseconds();
+  // Update the average-encode-time filter (skipped for the first sample,
+  // since there is no previous sample time yet).
+  if (last_encode_sample_ms_ != 0) {
+    int64_t diff_ms = now - last_encode_sample_ms_;
+    encode_time_->AddSample(encode_time_ms, diff_ms);
+  }
+  last_encode_sample_ms_ = now;
+
+  // In non-extended mode the encode time alone is the processing time.
+  if (!options_.enable_extended_processing_usage) {
+    AddProcessingTime(encode_time_ms);
+  }
+  UpdateCpuOveruseMetrics();
+}
+
+void OveruseFrameDetector::FrameSent(int64_t capture_time_ms) {
+  rtc::CritScope cs(&crit_);
+  if (!options_.enable_extended_processing_usage) {
+    return;
+  }
+  // Extended mode: the processing time is capture -> sent for this frame.
+  // End() returns -1 for unknown frames, which is filtered out below.
+  int delay_ms = frame_queue_->End(capture_time_ms,
+                                   clock_->TimeInMilliseconds());
+  if (delay_ms > 0) {
+    AddProcessingTime(delay_ms);
+  }
+  UpdateCpuOveruseMetrics();
+}
+
+// Feeds one processing-time sample to the usage estimator. The first call
+// only records the sample time (no previous sample to diff against).
+void OveruseFrameDetector::AddProcessingTime(int elapsed_ms) {
+  int64_t now = clock_->TimeInMilliseconds();
+  if (last_sample_time_ms_ != 0) {
+    int64_t diff_ms = now - last_sample_time_ms_;
+    usage_->AddSample(elapsed_ms, diff_ms);
+  }
+  last_sample_time_ms_ = now;
+}
+
+int32_t OveruseFrameDetector::Process() {
+  RTC_DCHECK(processing_thread_.CalledOnValidThread());
+
+  int64_t now = clock_->TimeInMilliseconds();
+
+  // Used to protect against Process() being called too often.
+  if (now < next_process_time_)
+    return 0;
+
+  next_process_time_ = now + kProcessIntervalMs;
+
+  // Copy metrics out under the lock; the overuse/underuse evaluation and the
+  // observer callbacks below run without holding crit_.
+  CpuOveruseMetrics current_metrics;
+  {
+    rtc::CritScope cs(&crit_);
+    ++num_process_times_;
+
+    current_metrics = metrics_;
+    // Require a minimum number of process calls before acting on metrics.
+    if (num_process_times_ <= options_.min_process_count)
+      return 0;
+  }
+
+  if (IsOverusing(current_metrics)) {
+    // If the last thing we did was going up, and now have to back down, we need
+    // to check if this peak was short. If so we should back off to avoid going
+    // back and forth between this load, the system doesn't seem to handle it.
+    bool check_for_backoff = last_rampup_time_ > last_overuse_time_;
+    if (check_for_backoff) {
+      if (now - last_rampup_time_ < kStandardRampUpDelayMs ||
+          num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
+        // Going up was not ok for very long, back off.
+        current_rampup_delay_ms_ *= kRampUpBackoffFactor;
+        if (current_rampup_delay_ms_ > kMaxRampUpDelayMs)
+          current_rampup_delay_ms_ = kMaxRampUpDelayMs;
+      } else {
+        // Not currently backing off, reset rampup delay.
+        current_rampup_delay_ms_ = kStandardRampUpDelayMs;
+      }
+    }
+
+    last_overuse_time_ = now;
+    in_quick_rampup_ = false;
+    checks_above_threshold_ = 0;
+    ++num_overuse_detections_;
+
+    if (observer_ != NULL)
+      observer_->OveruseDetected();
+  } else if (IsUnderusing(current_metrics, now)) {
+    last_rampup_time_ = now;
+    in_quick_rampup_ = true;  // Allow a quicker follow-up rampup.
+
+    if (observer_ != NULL)
+      observer_->NormalUsage();
+  }
+
+  int rampup_delay =
+      in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+
+  LOG(LS_VERBOSE) << " Frame stats: "
+                  << " encode usage " << current_metrics.encode_usage_percent
+                  << " overuse detections " << num_overuse_detections_
+                  << " rampup delay " << rampup_delay;
+
+  return 0;
+}
+
+// Returns true once the usage has exceeded the high threshold for
+// high_threshold_consecutive_count consecutive checks. Processing thread only.
+bool OveruseFrameDetector::IsOverusing(const CpuOveruseMetrics& metrics) {
+  bool overusing = false;
+  if (options_.enable_encode_usage_method) {
+    overusing = metrics.encode_usage_percent >=
+        options_.high_encode_usage_threshold_percent;
+  }
+  if (overusing) {
+    ++checks_above_threshold_;
+  } else {
+    checks_above_threshold_ = 0;
+  }
+  return checks_above_threshold_ >= options_.high_threshold_consecutive_count;
+}
+
+// Returns true if the usage is below the low threshold AND at least the
+// current rampup delay has passed since the last rampup. Processing thread
+// only.
+bool OveruseFrameDetector::IsUnderusing(const CpuOveruseMetrics& metrics,
+                                        int64_t time_now) {
+  int delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+  if (time_now < last_rampup_time_ + delay)
+    return false;
+
+  bool underusing = false;
+  if (options_.enable_encode_usage_method) {
+    underusing = metrics.encode_usage_percent <
+        options_.low_encode_usage_threshold_percent;
+  }
+  return underusing;
+}
+} // namespace webrtc
diff --git a/webrtc/video_engine/overuse_frame_detector.h b/webrtc/video_engine/overuse_frame_detector.h
new file mode 100644
index 0000000000..aff4b43025
--- /dev/null
+++ b/webrtc/video_engine/overuse_frame_detector.h
@@ -0,0 +1,181 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
+#define WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/interface/module.h"
+
+namespace webrtc {
+
+class Clock;
+
+// CpuOveruseObserver is called when a system overuse is detected and
+// VideoEngine cannot keep up the encoding frequency.
+// NOTE(review): callbacks are issued from OveruseFrameDetector::Process();
+// confirm the threading expectations of implementations against the module
+// process thread.
+class CpuOveruseObserver {
+ public:
+  // Called as soon as an overuse is detected.
+  virtual void OveruseDetected() = 0;
+  // Called periodically when the system is not overused any longer.
+  virtual void NormalUsage() = 0;
+
+ protected:
+  virtual ~CpuOveruseObserver() {}
+};
+
+// Configuration for OveruseFrameDetector. Defaults are set in the
+// constructor initializer list below.
+struct CpuOveruseOptions {
+  CpuOveruseOptions()
+      : enable_encode_usage_method(true),
+        low_encode_usage_threshold_percent(55),
+        high_encode_usage_threshold_percent(85),
+        enable_extended_processing_usage(true),
+        frame_timeout_interval_ms(1500),
+        min_frame_samples(120),
+        min_process_count(3),
+        high_threshold_consecutive_count(2) {}
+
+  // Method based on encode time of frames.
+  bool enable_encode_usage_method;
+  int low_encode_usage_threshold_percent;  // Threshold for triggering underuse.
+  int high_encode_usage_threshold_percent; // Threshold for triggering overuse.
+  bool enable_extended_processing_usage;  // Include a larger time span (in
+                                          // addition to encode time) for
+                                          // measuring the processing time of a
+                                          // frame.
+  // General settings.
+  int frame_timeout_interval_ms;  // The maximum allowed interval between two
+                                  // frames before resetting estimations.
+  int min_frame_samples;  // The minimum number of frames required.
+  int min_process_count;  // The number of initial process times required before
+                          // triggering an overuse/underuse.
+  int high_threshold_consecutive_count;  // The number of consecutive checks
+                                         // above the high threshold before
+                                         // triggering an overuse.
+};
+
+// Snapshot of the detector's filtered statistics. Fields are initialized to
+// -1 ("no data"); the detector pushes its first update from its constructor.
+struct CpuOveruseMetrics {
+  CpuOveruseMetrics()
+      : avg_encode_time_ms(-1),
+        encode_usage_percent(-1) {}
+
+  int avg_encode_time_ms;    // The average encode time in ms.
+  int encode_usage_percent;  // The average encode time divided by the average
+                             // time difference between incoming captured frames.
+};
+
+// Interface for receiving CpuOveruseMetrics updates from the detector.
+class CpuOveruseMetricsObserver {
+ public:
+  virtual ~CpuOveruseMetricsObserver() {}
+  virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
+};
+
+
+// Use to detect system overuse based on the send-side processing time of
+// incoming frames. Frame callbacks may come from a capture/encode thread
+// while Process() runs on the module process thread; shared state is guarded
+// by crit_.
+class OveruseFrameDetector : public Module {
+ public:
+  // |overuse_observer| may be null (no overuse callbacks are then made);
+  // |metrics_observer| must be non-null (DCHECKed in the implementation).
+  OveruseFrameDetector(Clock* clock,
+                       const CpuOveruseOptions& options,
+                       CpuOveruseObserver* overuse_observer,
+                       CpuOveruseMetricsObserver* metrics_observer);
+  ~OveruseFrameDetector();
+
+  // Called for each captured frame.
+  void FrameCaptured(int width, int height, int64_t capture_time_ms);
+
+  // Called for each encoded frame.
+  void FrameEncoded(int encode_time_ms);
+
+  // Called for each sent frame.
+  void FrameSent(int64_t capture_time_ms);
+
+  // Only public for testing.
+  int LastProcessingTimeMs() const;
+  int FramesInQueue() const;
+
+  // Implements Module.
+  int64_t TimeUntilNextProcess() override;
+  int32_t Process() override;
+
+ private:
+  class EncodeTimeAvg;
+  class SendProcessingUsage;
+  class FrameQueue;
+
+  void UpdateCpuOveruseMetrics() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+  // TODO(asapersson): This method is only used on one thread, so it shouldn't
+  // need a guard.
+  void AddProcessingTime(int elapsed_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+  // Only called on the processing thread.
+  bool IsOverusing(const CpuOveruseMetrics& metrics);
+  bool IsUnderusing(const CpuOveruseMetrics& metrics, int64_t time_now);
+
+  bool FrameTimeoutDetected(int64_t now) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
+  bool FrameSizeChanged(int num_pixels) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+  void ResetAll(int num_pixels) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+  // Protecting all members except const and those that are only accessed on the
+  // processing thread.
+  // TODO(asapersson): See if we can reduce locking. As is, video frame
+  // processing contends with reading stats and the processing thread.
+  mutable rtc::CriticalSection crit_;
+
+  const CpuOveruseOptions options_;
+
+  // Observer getting overuse reports.
+  CpuOveruseObserver* const observer_;
+
+  // Stats metrics.
+  CpuOveruseMetricsObserver* const metrics_observer_;
+  CpuOveruseMetrics metrics_ GUARDED_BY(crit_);
+
+  Clock* const clock_;
+  int64_t num_process_times_ GUARDED_BY(crit_);
+
+  // Time of the last captured frame; 0 means "no frame captured yet".
+  int64_t last_capture_time_ GUARDED_BY(crit_);
+
+  // Number of pixels of last captured frame.
+  int num_pixels_ GUARDED_BY(crit_);
+
+  // These seven members are only accessed on the processing thread.
+  int64_t next_process_time_;
+  int64_t last_overuse_time_;
+  int checks_above_threshold_;
+  int num_overuse_detections_;
+  int64_t last_rampup_time_;
+  bool in_quick_rampup_;
+  int current_rampup_delay_ms_;
+
+  int64_t last_encode_sample_ms_;  // Only accessed by one thread.
+  int64_t last_sample_time_ms_;    // Only accessed by one thread.
+
+  // TODO(asapersson): Can these be regular members (avoid separate heap
+  // allocs)?
+  const rtc::scoped_ptr<EncodeTimeAvg> encode_time_ GUARDED_BY(crit_);
+  const rtc::scoped_ptr<SendProcessingUsage> usage_ GUARDED_BY(crit_);
+  const rtc::scoped_ptr<FrameQueue> frame_queue_ GUARDED_BY(crit_);
+
+  // Checks that Module methods are called on a single (processing) thread.
+  rtc::ThreadChecker processing_thread_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
diff --git a/webrtc/video_engine/overuse_frame_detector_unittest.cc b/webrtc/video_engine/overuse_frame_detector_unittest.cc
new file mode 100644
index 0000000000..d502f02204
--- /dev/null
+++ b/webrtc/video_engine/overuse_frame_detector_unittest.cc
@@ -0,0 +1,405 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/overuse_frame_detector.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace {
+  const int kWidth = 640;
+  const int kHeight = 480;
+  // Typical ~30 fps capture interval.
+  const int kFrameInterval33ms = 33;
+  // Must match kProcessIntervalMs in overuse_frame_detector.cc.
+  const int kProcessIntervalMs = 5000;
+  const int kProcessTime5ms = 5;
+}  // namespace
+
+// GMock observer used to set call-count expectations in the tests.
+class MockCpuOveruseObserver : public CpuOveruseObserver {
+ public:
+  MockCpuOveruseObserver() {}
+  virtual ~MockCpuOveruseObserver() {}
+
+  MOCK_METHOD0(OveruseDetected, void());
+  MOCK_METHOD0(NormalUsage, void());
+};
+
+// Simple counting observer, used where the tests need to inspect how many
+// callbacks arrived between individual Process() calls.
+class CpuOveruseObserverImpl : public CpuOveruseObserver {
+ public:
+  CpuOveruseObserverImpl() :
+    overuse_(0),
+    normaluse_(0) {}
+  virtual ~CpuOveruseObserverImpl() {}
+
+  void OveruseDetected() { ++overuse_; }
+  void NormalUsage() { ++normaluse_; }
+
+  int overuse_;
+  int normaluse_;
+};
+
+class OveruseFrameDetectorTest : public ::testing::Test,
+                                 public CpuOveruseMetricsObserver {
+ protected:
+  virtual void SetUp() {
+    clock_.reset(new SimulatedClock(1234));
+    observer_.reset(new MockCpuOveruseObserver());
+    // Let the first Process() call act on metrics immediately.
+    options_.min_process_count = 0;
+    ReinitializeOveruseDetector();
+  }
+
+  // Recreates the detector so that later changes to options_ take effect.
+  void ReinitializeOveruseDetector() {
+    overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+                                                     observer_.get(), this));
+  }
+
+  // CpuOveruseMetricsObserver implementation; stores the latest metrics.
+  void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
+    metrics_ = metrics;
+  }
+
+  // Mirrors SendProcessingUsage's initial (mid-threshold) usage estimate.
+  int InitialUsage() {
+    return ((options_.low_encode_usage_threshold_percent +
+             options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
+  }
+
+  // Simulates capture/encode/send of |num_frames| frames, each taking
+  // |delay_ms| to process and spaced |interval_ms| apart.
+  void InsertAndSendFramesWithInterval(
+      int num_frames, int interval_ms, int width, int height, int delay_ms) {
+    while (num_frames-- > 0) {
+      int64_t capture_time_ms = clock_->TimeInMilliseconds();
+      overuse_detector_->FrameCaptured(width, height, capture_time_ms);
+      clock_->AdvanceTimeMilliseconds(delay_ms);
+      overuse_detector_->FrameEncoded(delay_ms);
+      overuse_detector_->FrameSent(capture_time_ms);
+      clock_->AdvanceTimeMilliseconds(interval_ms - delay_ms);
+    }
+  }
+
+  // Feeds high-load frames and runs the overuse check |num_times|.
+  void TriggerOveruse(int num_times) {
+    const int kDelayMs = 32;
+    for (int i = 0; i < num_times; ++i) {
+      InsertAndSendFramesWithInterval(
+          1000, kFrameInterval33ms, kWidth, kHeight, kDelayMs);
+      overuse_detector_->Process();
+    }
+  }
+
+  // Feeds low-load frames long enough to pass the rampup delay, then checks.
+  void TriggerUnderuse() {
+    const int kDelayMs1 = 5;
+    const int kDelayMs2 = 6;
+    InsertAndSendFramesWithInterval(
+        1300, kFrameInterval33ms, kWidth, kHeight, kDelayMs1);
+    InsertAndSendFramesWithInterval(
+        1, kFrameInterval33ms, kWidth, kHeight, kDelayMs2);
+    overuse_detector_->Process();
+  }
+
+  int AvgEncodeTimeMs() { return metrics_.avg_encode_time_ms; }
+
+  int UsagePercent() { return metrics_.encode_usage_percent; }
+
+  CpuOveruseOptions options_;
+  rtc::scoped_ptr<SimulatedClock> clock_;
+  rtc::scoped_ptr<MockCpuOveruseObserver> observer_;
+  rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
+  CpuOveruseMetrics metrics_;
+};
+
+
+// enable_encode_usage_method = true;
+// enable_extended_processing_usage = false;
+// UsagePercent() > high_encode_usage_threshold_percent => overuse.
+// UsagePercent() < low_encode_usage_threshold_percent => underuse.
+TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
+  options_.enable_extended_processing_usage = false;
+  ReinitializeOveruseDetector();
+  // usage > high => overuse
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
+  options_.enable_extended_processing_usage = false;
+  ReinitializeOveruseDetector();
+  // usage > high => overuse
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  // usage < low => underuse
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+  TriggerUnderuse();
+}
+
+// The detector is constructed with a null observer, so the (unregistered)
+// mock must see no calls at all.
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
+  options_.enable_extended_processing_usage = false;
+  overuse_detector_.reset(
+      new OveruseFrameDetector(clock_.get(), options_, nullptr, this));
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+  TriggerUnderuse();
+}
+
+// With the encode-usage method disabled, no callbacks should fire.
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithMethodDisabled) {
+  options_.enable_encode_usage_method = false;
+  options_.enable_extended_processing_usage = false;
+  ReinitializeOveruseDetector();
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+  TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
+  options_.enable_extended_processing_usage = false;
+  ReinitializeOveruseDetector();
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(2);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+  TriggerUnderuse();
+}
+
+// With min_process_count = 1, the first Process() call is ignored; only the
+// second (after the process interval) may report normal usage.
+TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) {
+  options_.enable_extended_processing_usage = false;
+  options_.min_process_count = 1;
+  CpuOveruseObserverImpl overuse_observer;
+  overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+                                                   &overuse_observer, this));
+  InsertAndSendFramesWithInterval(
+      1200, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+  overuse_detector_->Process();
+  EXPECT_EQ(0, overuse_observer.normaluse_);
+  clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
+  overuse_detector_->Process();
+  EXPECT_EQ(1, overuse_observer.normaluse_);
+}
+
+// Sustained overuse must never be reported as a recovery.
+TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
+  options_.enable_extended_processing_usage = false;
+  ReinitializeOveruseDetector();
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(64);
+  for(size_t i = 0; i < 64; ++i) {
+    TriggerOveruse(options_.high_threshold_consecutive_count);
+  }
+}
+
+// Overuse fires only after the configured number of consecutive high checks.
+TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+  options_.enable_extended_processing_usage = false;
+  options_.high_threshold_consecutive_count = 2;
+  ReinitializeOveruseDetector();
+  TriggerOveruse(2);
+}
+
+// One high check is not enough when two consecutive checks are required.
+TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+  options_.enable_extended_processing_usage = false;
+  options_.high_threshold_consecutive_count = 2;
+  ReinitializeOveruseDetector();
+  TriggerOveruse(1);
+}
+
+// Usage converges to processing time / frame interval (in percent).
+TEST_F(OveruseFrameDetectorTest, ProcessingUsage) {
+  InsertAndSendFramesWithInterval(
+      1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+  EXPECT_EQ(kProcessTime5ms * 100 / kFrameInterval33ms, UsagePercent());
+}
+
+// Changing the frame resolution resets the usage estimate to its initial
+// value.
+TEST_F(OveruseFrameDetectorTest, ResetAfterResolutionChange) {
+  EXPECT_EQ(InitialUsage(), UsagePercent());
+  InsertAndSendFramesWithInterval(
+      1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+  EXPECT_NE(InitialUsage(), UsagePercent());
+  // Verify reset.
+  InsertAndSendFramesWithInterval(
+      1, kFrameInterval33ms, kWidth, kHeight + 1, kProcessTime5ms);
+  EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+// An inter-frame gap strictly greater than frame_timeout_interval_ms resets
+// the usage estimate; a gap exactly equal to it does not.
+TEST_F(OveruseFrameDetectorTest, ResetAfterFrameTimeout) {
+  EXPECT_EQ(InitialUsage(), UsagePercent());
+  InsertAndSendFramesWithInterval(
+      1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+  EXPECT_NE(InitialUsage(), UsagePercent());
+  InsertAndSendFramesWithInterval(
+      2, options_.frame_timeout_interval_ms, kWidth, kHeight, kProcessTime5ms);
+  EXPECT_NE(InitialUsage(), UsagePercent());
+  // Verify reset.
+  InsertAndSendFramesWithInterval(
+      2, options_.frame_timeout_interval_ms + 1, kWidth, kHeight,
+      kProcessTime5ms);
+  EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+// The initial estimate is reported until min_frame_samples frames are seen.
+TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdating) {
+  options_.min_frame_samples = 40;
+  ReinitializeOveruseDetector();
+  InsertAndSendFramesWithInterval(
+      40, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+  EXPECT_EQ(InitialUsage(), UsagePercent());
+  InsertAndSendFramesWithInterval(
+      1, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+  EXPECT_NE(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) {
+  EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+// With extended processing usage disabled, FrameSent() does not record a
+// processing time.
+TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrameDisabled) {
+  options_.enable_extended_processing_usage = false;
+  ReinitializeOveruseDetector();
+  const int kProcessingTimeMs = 100;
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  overuse_detector_->FrameSent(33);
+  EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+}
+
+// With it enabled, capture -> sent time is recorded for a matching frame.
+TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrame) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  const int kProcessingTimeMs = 100;
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+  overuse_detector_->FrameSent(33);
+  EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+  EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_TwoFrames) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  const int kProcessingTimeMs1 = 100;
+  const int kProcessingTimeMs2 = 50;
+  const int kTimeBetweenFramesMs = 200;
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs1);
+  overuse_detector_->FrameSent(33);
+  EXPECT_EQ(kProcessingTimeMs1, overuse_detector_->LastProcessingTimeMs());
+  clock_->AdvanceTimeMilliseconds(kTimeBetweenFramesMs);
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs2);
+  overuse_detector_->FrameSent(66);
+  EXPECT_EQ(kProcessingTimeMs2, overuse_detector_->LastProcessingTimeMs());
+}
+
+// The queue caps at 91 entries (FrameQueue's kMaxSize of 90 plus one, due to
+// the strict '>' in FrameQueue::Start()).
+TEST_F(OveruseFrameDetectorTest, FrameDelay_MaxQueueSize) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  const int kMaxQueueSize = 91;
+  for (int i = 0; i < kMaxQueueSize * 2; ++i) {
+    overuse_detector_->FrameCaptured(kWidth, kHeight, i);
+  }
+  EXPECT_EQ(kMaxQueueSize, overuse_detector_->FramesInQueue());
+}
+
+// Sending frame 66 drops the older unsent frames (33, 35) from the queue.
+TEST_F(OveruseFrameDetectorTest, FrameDelay_NonProcessedFramesRemoved) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  const int kProcessingTimeMs = 100;
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 35);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 99);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+  EXPECT_EQ(4, overuse_detector_->FramesInQueue());
+  overuse_detector_->FrameSent(66);
+  // Frame 33, 35 removed, 66 processed, 99 not processed.
+  EXPECT_EQ(2 * kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+  EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+  overuse_detector_->FrameSent(99);
+  EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+  EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_ResetClearsFrames) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  const int kProcessingTimeMs = 100;
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+  EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  // Verify reset (resolution changed).
+  overuse_detector_->FrameCaptured(kWidth, kHeight + 1, 66);
+  EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  overuse_detector_->FrameSent(66);
+  EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+  EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+// FrameSent() with an unknown capture time leaves the queue untouched.
+TEST_F(OveruseFrameDetectorTest, FrameDelay_NonMatchingSendFrameIgnored) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  const int kProcessingTimeMs = 100;
+  overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+  clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+  overuse_detector_->FrameSent(34);
+  EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+  overuse_detector_->FrameSent(33);
+  EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+}
+
+// The average encode time starts at EncodeTimeAvg's 5 ms seed and converges
+// to the fed sample value.
+TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
+  const int kInitialAvgEncodeTimeInMs = 5;
+  EXPECT_EQ(kInitialAvgEncodeTimeInMs, AvgEncodeTimeMs());
+  for (int i = 0; i < 30; i++) {
+    clock_->AdvanceTimeMilliseconds(33);
+    overuse_detector_->FrameEncoded(2);
+  }
+  EXPECT_EQ(2, AvgEncodeTimeMs());
+}
+
+// enable_encode_usage_method = true;
+// enable_extended_processing_usage = true;
+// UsagePercent() > high_encode_usage_threshold_percent => overuse.
+// UsagePercent() < low_encode_usage_threshold_percent => underuse.
+TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithExtendedProcessingUsage) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  // usage > high => overuse
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithExtendedProcessingUsage) {
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  // usage > high => overuse
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  // usage < low => underuse
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+  TriggerUnderuse();
+}
+
+// Disabling the encode-usage method suppresses callbacks even in extended
+// mode.
+TEST_F(OveruseFrameDetectorTest,
+       OveruseAndRecoverWithExtendedProcessingUsageMethodDisabled) {
+  options_.enable_encode_usage_method = false;
+  options_.enable_extended_processing_usage = true;
+  ReinitializeOveruseDetector();
+  // usage > high => overuse
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+  TriggerOveruse(options_.high_threshold_consecutive_count);
+  // usage < low => underuse
+  EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+  TriggerUnderuse();
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/payload_router.cc b/webrtc/video_engine/payload_router.cc
new file mode 100644
index 0000000000..3af3d4829e
--- /dev/null
+++ b/webrtc/video_engine/payload_router.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/payload_router.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+// The router starts out inactive with no RTP modules; RoutePayload() drops
+// everything until set_active(true) and SetSendingRtpModules() are called.
+PayloadRouter::PayloadRouter()
+    : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+      active_(false) {}
+
+PayloadRouter::~PayloadRouter() {}
+
+// Maximum payload bytes that fit in one packet before any per-module limits
+// are applied: IP_PACKET_SIZE (MTU) minus 44 bytes of fixed overhead
+// (20 IPv4 + 8 UDP + 12 RTP + 4 SRTP auth tag; see the kDefaultMaxLength
+// expectation in payload_router_unittest.cc).
+size_t PayloadRouter::DefaultMaxPayloadLength() {
+  const size_t kIpUdpSrtpLength = 44;
+  return IP_PACKET_SIZE - kIpUdpSrtpLength;
+}
+
+// Replaces the full set of sending modules. Callers pass modules sorted in
+// simulcast-index order (see header); the previous set is discarded.
+void PayloadRouter::SetSendingRtpModules(
+    const std::list<RtpRtcp*>& rtp_modules) {
+  CriticalSectionScoped cs(crit_.get());
+  rtp_modules_.clear();
+  rtp_modules_.reserve(rtp_modules.size());
+  for (auto* rtp_module : rtp_modules) {
+    rtp_modules_.push_back(rtp_module);
+  }
+}
+
+void PayloadRouter::set_active(bool active) {
+  CriticalSectionScoped cs(crit_.get());
+  active_ = active;
+}
+
+// Reports active only when there is at least one module to route to, so an
+// "active" router with an empty module list still reads as inactive.
+bool PayloadRouter::active() {
+  CriticalSectionScoped cs(crit_.get());
+  return active_ && !rtp_modules_.empty();
+}
+
+// Forwards one encoded frame to the RTP module selected by the simulcast
+// index in |rtp_video_hdr| (module 0 when the header is NULL). Returns false
+// when the router is inactive, has no modules, the simulcast index is out of
+// range, or the module's SendOutgoingData() fails.
+bool PayloadRouter::RoutePayload(FrameType frame_type,
+                                 int8_t payload_type,
+                                 uint32_t time_stamp,
+                                 int64_t capture_time_ms,
+                                 const uint8_t* payload_data,
+                                 size_t payload_length,
+                                 const RTPFragmentationHeader* fragmentation,
+                                 const RTPVideoHeader* rtp_video_hdr) {
+  CriticalSectionScoped cs(crit_.get());
+  if (!active_ || rtp_modules_.empty())
+    return false;
+
+  // The simulcast index might actually be larger than the number of modules in
+  // case the encoder was processing a frame during a codec reconfig.
+  // Dropping the frame here (rather than asserting) keeps that race benign.
+  if (rtp_video_hdr != NULL &&
+      rtp_video_hdr->simulcastIdx >= rtp_modules_.size())
+    return false;
+
+  int stream_idx = 0;
+  if (rtp_video_hdr != NULL)
+    stream_idx = rtp_video_hdr->simulcastIdx;
+  // SendOutgoingData() returns 0 on success.
+  return rtp_modules_[stream_idx]->SendOutgoingData(
+      frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
+      payload_length, fragmentation, rtp_video_hdr) == 0 ? true : false;
+}
+
+// Applies per-stream target bitrates in module order. If fewer bitrates than
+// modules are supplied the call is a no-op (codec-reconfig race); extra
+// trailing entries beyond the module count are silently ignored (see the
+// SetTargetSendBitrates unit test).
+void PayloadRouter::SetTargetSendBitrates(
+    const std::vector<uint32_t>& stream_bitrates) {
+  CriticalSectionScoped cs(crit_.get());
+  if (stream_bitrates.size() < rtp_modules_.size()) {
+    // There can be a size mis-match during codec reconfiguration.
+    return;
+  }
+  int idx = 0;
+  for (auto* rtp_module : rtp_modules_) {
+    rtp_module->SetTargetSendBitrate(stream_bitrates[idx++]);
+  }
+}
+
+// Returns the smallest max-payload length across all modules, capped at
+// DefaultMaxPayloadLength() when no module imposes a tighter limit.
+size_t PayloadRouter::MaxPayloadLength() const {
+  size_t min_payload_length = DefaultMaxPayloadLength();
+  CriticalSectionScoped cs(crit_.get());
+  for (auto* rtp_module : rtp_modules_) {
+    size_t module_payload_length = rtp_module->MaxDataPayloadLength();
+    if (module_payload_length < min_payload_length)
+      min_payload_length = module_payload_length;
+  }
+  return min_payload_length;
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/payload_router.h b/webrtc/video_engine/payload_router.h
new file mode 100644
index 0000000000..17bc279290
--- /dev/null
+++ b/webrtc/video_engine/payload_router.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_PAYLOAD_ROUTER_H_
+#define WEBRTC_VIDEO_ENGINE_PAYLOAD_ROUTER_H_
+
+#include <list>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/include/atomic32.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RTPFragmentationHeader;
+class RtpRtcp;
+struct RTPVideoHeader;
+
+// PayloadRouter routes outgoing data to the correct sending RTP module, based
+// on the simulcast layer in RTPVideoHeader.
+// PayloadRouter routes outgoing data to the correct sending RTP module, based
+// on the simulcast layer in RTPVideoHeader.
+// Lifetime is managed by intrusive reference counting (AddRef/Release);
+// holders must balance every AddRef with a Release.
+class PayloadRouter {
+ public:
+  PayloadRouter();
+  ~PayloadRouter();
+
+  // Max payload bytes per packet assuming only IP/UDP/SRTP overhead.
+  static size_t DefaultMaxPayloadLength();
+
+  // Rtp modules are assumed to be sorted in simulcast index order.
+  void SetSendingRtpModules(const std::list<RtpRtcp*>& rtp_modules);
+
+  // PayloadRouter will only route packets if being active, all packets will be
+  // dropped otherwise.
+  void set_active(bool active);
+  bool active();
+
+  // Input parameters according to the signature of RtpRtcp::SendOutgoingData.
+  // Returns true if the packet was routed / sent, false otherwise.
+  bool RoutePayload(FrameType frame_type,
+                    int8_t payload_type,
+                    uint32_t time_stamp,
+                    int64_t capture_time_ms,
+                    const uint8_t* payload_data,
+                    size_t payload_size,
+                    const RTPFragmentationHeader* fragmentation,
+                    const RTPVideoHeader* rtp_video_hdr);
+
+  // Configures current target bitrate per module. 'stream_bitrates' is assumed
+  // to be in the same order as 'SetSendingRtpModules'.
+  void SetTargetSendBitrates(const std::vector<uint32_t>& stream_bitrates);
+
+  // Returns the maximum allowed data payload length, given the configured MTU
+  // and RTP headers.
+  size_t MaxPayloadLength() const;
+
+  // Intrusive ref counting; Release() deletes this object when the count
+  // reaches zero.
+  void AddRef() { ++ref_count_; }
+  void Release() { if (--ref_count_ == 0) { delete this; } }
+
+ private:
+  // TODO(mflodman): When the new video API has launched, remove crit_ and
+  // assume rtp_modules_ will never change during a call.
+  rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+  // Active sending RTP modules, in layer order.
+  std::vector<RtpRtcp*> rtp_modules_ GUARDED_BY(crit_.get());
+  bool active_ GUARDED_BY(crit_.get());
+
+  // NOTE(review): relies on Atomic32 default-initializing to zero — confirm
+  // against the Atomic32 constructor.
+  Atomic32 ref_count_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(PayloadRouter);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_PAYLOAD_ROUTER_H_
diff --git a/webrtc/video_engine/payload_router_unittest.cc b/webrtc/video_engine/payload_router_unittest.cc
new file mode 100644
index 0000000000..de391576d8
--- /dev/null
+++ b/webrtc/video_engine/payload_router_unittest.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <list>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/video_engine/payload_router.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+namespace webrtc {
+
+// Fixture creating a fresh PayloadRouter for each test.
+class PayloadRouterTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    payload_router_.reset(new PayloadRouter());
+  }
+  rtc::scoped_ptr<PayloadRouter> payload_router_;
+};
+
+// With a single RTP module: packets are only forwarded while the router is
+// active and has modules; clearing the module list stops routing again.
+TEST_F(PayloadRouterTest, SendOnOneModule) {
+  MockRtpRtcp rtp;
+  std::list<RtpRtcp*> modules(1, &rtp);
+
+  payload_router_->SetSendingRtpModules(modules);
+
+  uint8_t payload = 'a';
+  FrameType frame_type = kVideoFrameKey;
+  int8_t payload_type = 96;
+
+  // Inactive by default: nothing is sent.
+  EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+                                    NULL))
+      .Times(0);
+  EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+                                             &payload, 1, NULL, NULL));
+
+  payload_router_->set_active(true);
+  EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+                                    NULL))
+      .Times(1);
+  EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+                                            &payload, 1, NULL, NULL));
+
+  payload_router_->set_active(false);
+  EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+                                    NULL))
+      .Times(0);
+  EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+                                             &payload, 1, NULL, NULL));
+
+  payload_router_->set_active(true);
+  EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+                                    NULL))
+      .Times(1);
+  EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+                                            &payload, 1, NULL, NULL));
+
+  // Active but with no modules: routing fails again.
+  modules.clear();
+  payload_router_->SetSendingRtpModules(modules);
+  EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+                                    NULL))
+      .Times(0);
+  EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+                                             &payload, 1, NULL, NULL));
+}
+
+// With two modules: the simulcast index in RTPVideoHeader selects exactly one
+// module; inactive routers and out-of-range indices drop the frame.
+TEST_F(PayloadRouterTest, SendSimulcast) {
+  MockRtpRtcp rtp_1;
+  MockRtpRtcp rtp_2;
+  std::list<RtpRtcp*> modules;
+  modules.push_back(&rtp_1);
+  modules.push_back(&rtp_2);
+
+  payload_router_->SetSendingRtpModules(modules);
+
+  uint8_t payload_1 = 'a';
+  FrameType frame_type_1 = kVideoFrameKey;
+  int8_t payload_type_1 = 96;
+  RTPVideoHeader rtp_hdr_1;
+  rtp_hdr_1.simulcastIdx = 0;
+
+  payload_router_->set_active(true);
+  EXPECT_CALL(rtp_1, SendOutgoingData(frame_type_1, payload_type_1, 0, 0, _, 1,
+                                      NULL, &rtp_hdr_1))
+      .Times(1);
+  EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+      .Times(0);
+  EXPECT_TRUE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+                                            &payload_1, 1, NULL, &rtp_hdr_1));
+
+  uint8_t payload_2 = 'b';
+  FrameType frame_type_2 = kVideoFrameDelta;
+  int8_t payload_type_2 = 97;
+  RTPVideoHeader rtp_hdr_2;
+  rtp_hdr_2.simulcastIdx = 1;
+  EXPECT_CALL(rtp_2, SendOutgoingData(frame_type_2, payload_type_2, 0, 0, _, 1,
+                                      NULL, &rtp_hdr_2))
+      .Times(1);
+  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+      .Times(0);
+  EXPECT_TRUE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
+                                            &payload_2, 1, NULL, &rtp_hdr_2));
+
+  // Inactive.
+  payload_router_->set_active(false);
+  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+      .Times(0);
+  EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+      .Times(0);
+  EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+                                             &payload_1, 1, NULL, &rtp_hdr_1));
+  EXPECT_FALSE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
+                                             &payload_2, 1, NULL, &rtp_hdr_2));
+
+  // Invalid simulcast index.
+  payload_router_->set_active(true);
+  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+      .Times(0);
+  EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+      .Times(0);
+  rtp_hdr_1.simulcastIdx = 2;
+  EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+                                             &payload_1, 1, NULL, &rtp_hdr_1));
+}
+
+// MaxPayloadLength() is the minimum over module limits, never exceeding the
+// IP/UDP/SRTP default derived from a 1500-byte MTU.
+TEST_F(PayloadRouterTest, MaxPayloadLength) {
+  // Without any limitations from the modules, verify we get the max payload
+  // length for IP/UDP/SRTP with a MTU of 150 bytes.
+  const size_t kDefaultMaxLength = 1500 - 20 - 8 - 12 - 4;
+  EXPECT_EQ(kDefaultMaxLength, payload_router_->DefaultMaxPayloadLength());
+  EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+
+  MockRtpRtcp rtp_1;
+  MockRtpRtcp rtp_2;
+  std::list<RtpRtcp*> modules;
+  modules.push_back(&rtp_1);
+  modules.push_back(&rtp_2);
+  payload_router_->SetSendingRtpModules(modules);
+
+  // Modules return a higher length than the default value.
+  EXPECT_CALL(rtp_1, MaxDataPayloadLength())
+      .Times(1)
+      .WillOnce(Return(kDefaultMaxLength + 10));
+  EXPECT_CALL(rtp_2, MaxDataPayloadLength())
+      .Times(1)
+      .WillOnce(Return(kDefaultMaxLength + 10));
+  EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+
+  // The modules return a value lower than default.
+  const size_t kTestMinPayloadLength = 1001;
+  EXPECT_CALL(rtp_1, MaxDataPayloadLength())
+      .Times(1)
+      .WillOnce(Return(kTestMinPayloadLength + 10));
+  EXPECT_CALL(rtp_2, MaxDataPayloadLength())
+      .Times(1)
+      .WillOnce(Return(kTestMinPayloadLength));
+  EXPECT_EQ(kTestMinPayloadLength, payload_router_->MaxPayloadLength());
+}
+
+// SetTargetSendBitrates(): a full-size vector configures every module, a
+// too-short vector is ignored, and extra trailing entries are dropped.
+TEST_F(PayloadRouterTest, SetTargetSendBitrates) {
+  MockRtpRtcp rtp_1;
+  MockRtpRtcp rtp_2;
+  std::list<RtpRtcp*> modules;
+  modules.push_back(&rtp_1);
+  modules.push_back(&rtp_2);
+  payload_router_->SetSendingRtpModules(modules);
+
+  const uint32_t bitrate_1 = 10000;
+  const uint32_t bitrate_2 = 76543;
+  std::vector<uint32_t> bitrates (2, bitrate_1);
+  bitrates[1] = bitrate_2;
+  EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+      .Times(1);
+  EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+      .Times(1);
+  payload_router_->SetTargetSendBitrates(bitrates);
+
+  // Fewer bitrates than modules: the call must be a no-op.
+  bitrates.resize(1);
+  EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+      .Times(0);
+  EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+      .Times(0);
+  payload_router_->SetTargetSendBitrates(bitrates);
+
+  // More bitrates than modules: the extra entry is ignored.
+  bitrates.resize(3);
+  bitrates[1] = bitrate_2;
+  bitrates[2] = bitrate_1 + bitrate_2;
+  EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+      .Times(1);
+  EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+      .Times(1);
+  payload_router_->SetTargetSendBitrates(bitrates);
+}
+} // namespace webrtc
diff --git a/webrtc/video_engine/report_block_stats.cc b/webrtc/video_engine/report_block_stats.cc
new file mode 100644
index 0000000000..6df62882d8
--- /dev/null
+++ b/webrtc/video_engine/report_block_stats.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/report_block_stats.h"
+
+namespace webrtc {
+
+namespace {
+// Converts a lost/total packet count pair into an RTCP-style 8-bit fixed
+// point fraction (0-255), rounded to nearest by adding half the divisor.
+// Returns 0 when no packets were expected.
+// NOTE(review): num_lost_sequence_numbers * 255 can overflow uint32_t when
+// losses exceed ~16.8M packets — confirm this range is acceptable.
+int FractionLost(uint32_t num_lost_sequence_numbers,
+                 uint32_t num_sequence_numbers) {
+  if (num_sequence_numbers == 0) {
+    return 0;
+  }
+  return ((num_lost_sequence_numbers * 255) + (num_sequence_numbers / 2)) /
+      num_sequence_numbers;
+}
+}  // namespace
+
+
+// Helper class for rtcp statistics.
+// Helper class for rtcp statistics.
+ReportBlockStats::ReportBlockStats()
+    : num_sequence_numbers_(0),
+      num_lost_sequence_numbers_(0) {
+}
+
+// Converts |rtcp_stats| into an RTCPReportBlock keyed by |source_ssrc| and
+// folds it into the cumulative packet/loss counters. The per-call increment
+// outputs of StoreAndAddPacketIncrement() are discarded here; only the
+// member totals are of interest.
+void ReportBlockStats::Store(const RtcpStatistics& rtcp_stats,
+                             uint32_t remote_ssrc,
+                             uint32_t source_ssrc) {
+  RTCPReportBlock block;
+  block.cumulativeLost = rtcp_stats.cumulative_lost;
+  block.fractionLost = rtcp_stats.fraction_lost;
+  block.extendedHighSeqNum = rtcp_stats.extended_max_sequence_number;
+  block.jitter = rtcp_stats.jitter;
+  block.remoteSSRC = remote_ssrc;
+  block.sourceSSRC = source_ssrc;
+  uint32_t num_sequence_numbers = 0;
+  uint32_t num_lost_sequence_numbers = 0;
+  StoreAndAddPacketIncrement(
+      block, &num_sequence_numbers, &num_lost_sequence_numbers);
+}
+
+// Stores every report block (updating cumulative counters) and returns an
+// aggregate: cumulative loss summed, jitter averaged (rounded), and fraction
+// lost computed from the packet deltas since the previously stored blocks —
+// so the first call for a set of SSRCs reports fractionLost == 0.
+RTCPReportBlock ReportBlockStats::AggregateAndStore(
+    const ReportBlockVector& report_blocks) {
+  RTCPReportBlock aggregate;
+  if (report_blocks.empty()) {
+    return aggregate;
+  }
+  uint32_t num_sequence_numbers = 0;
+  uint32_t num_lost_sequence_numbers = 0;
+  ReportBlockVector::const_iterator report_block = report_blocks.begin();
+  for (; report_block != report_blocks.end(); ++report_block) {
+    aggregate.cumulativeLost += report_block->cumulativeLost;
+    aggregate.jitter += report_block->jitter;
+    StoreAndAddPacketIncrement(*report_block,
+                               &num_sequence_numbers,
+                               &num_lost_sequence_numbers);
+  }
+
+  if (report_blocks.size() == 1) {
+    // No aggregation needed. Note that the block above still ran, so the
+    // cumulative counters and stored block were updated before returning.
+    return report_blocks[0];
+  }
+  // Fraction lost since previous report block.
+  aggregate.fractionLost =
+      FractionLost(num_lost_sequence_numbers, num_sequence_numbers);
+  // Average jitter across blocks, rounded to nearest.
+  aggregate.jitter = static_cast<uint32_t>(
+      (aggregate.jitter + report_blocks.size() / 2) / report_blocks.size());
+  return aggregate;
+}
+
+// Computes the packet/loss delta versus the previously stored block for the
+// same source SSRC, adds it to both the output counters and the member
+// totals, then stores |report_block| as the new baseline. Negative deltas
+// (sequence-number wrap or stats reset) are ignored.
+void ReportBlockStats::StoreAndAddPacketIncrement(
+    const RTCPReportBlock& report_block,
+    uint32_t* num_sequence_numbers,
+    uint32_t* num_lost_sequence_numbers) {
+  // Get diff with previous report block.
+  ReportBlockMap::iterator prev_report_block = prev_report_blocks_.find(
+      report_block.sourceSSRC);
+  if (prev_report_block != prev_report_blocks_.end()) {
+    int seq_num_diff = report_block.extendedHighSeqNum -
+                       prev_report_block->second.extendedHighSeqNum;
+    int cum_loss_diff = report_block.cumulativeLost -
+                        prev_report_block->second.cumulativeLost;
+    if (seq_num_diff >= 0 && cum_loss_diff >= 0) {
+      *num_sequence_numbers += seq_num_diff;
+      *num_lost_sequence_numbers += cum_loss_diff;
+      // Update total number of packets/lost packets.
+      num_sequence_numbers_ += seq_num_diff;
+      num_lost_sequence_numbers_ += cum_loss_diff;
+    }
+  }
+  // Store current report block.
+  prev_report_blocks_[report_block.sourceSSRC] = report_block;
+}
+
+// Total loss over the object's lifetime as a percentage (0-100), or -1 until
+// at least one packet delta has been accumulated.
+int ReportBlockStats::FractionLostInPercent() const {
+  if (num_sequence_numbers_ == 0) {
+    return -1;
+  }
+  return FractionLost(
+      num_lost_sequence_numbers_, num_sequence_numbers_) * 100 / 255;
+}
+
+} // namespace webrtc
+
diff --git a/webrtc/video_engine/report_block_stats.h b/webrtc/video_engine/report_block_stats.h
new file mode 100644
index 0000000000..dadcc9d410
--- /dev/null
+++ b/webrtc/video_engine/report_block_stats.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_REPORT_BLOCK_STATS_H_
+#define WEBRTC_VIDEO_ENGINE_REPORT_BLOCK_STATS_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+// Helper class for rtcp statistics.
+// Helper class for rtcp statistics: accumulates packet/loss counts across
+// successive RTCP report blocks (per source SSRC) and aggregates report
+// blocks from multiple SSRCs into one.
+class ReportBlockStats {
+ public:
+  typedef std::map<uint32_t, RTCPReportBlock> ReportBlockMap;
+  typedef std::vector<RTCPReportBlock> ReportBlockVector;
+  ReportBlockStats();
+  ~ReportBlockStats() {}
+
+  // Updates stats and stores report blocks.
+  // Returns an aggregate of the |report_blocks|.
+  RTCPReportBlock AggregateAndStore(const ReportBlockVector& report_blocks);
+
+  // Updates stats and stores report block.
+  void Store(const RtcpStatistics& rtcp_stats,
+             uint32_t remote_ssrc,
+             uint32_t source_ssrc);
+
+  // Returns the total fraction of lost packets (or -1 if less than two report
+  // blocks have been stored).
+  int FractionLostInPercent() const;
+
+ private:
+  // Updates the total number of packets/lost packets.
+  // Stores the report block.
+  // Returns the number of packets/lost packets since previous report block.
+  void StoreAndAddPacketIncrement(const RTCPReportBlock& report_block,
+                                  uint32_t* num_sequence_numbers,
+                                  uint32_t* num_lost_sequence_numbers);
+
+  // The total number of packets/lost packets.
+  uint32_t num_sequence_numbers_;
+  uint32_t num_lost_sequence_numbers_;
+
+  // Map holding the last stored report block (mapped by the source SSRC).
+  ReportBlockMap prev_report_blocks_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_REPORT_BLOCK_STATS_H_
+
diff --git a/webrtc/video_engine/report_block_stats_unittest.cc b/webrtc/video_engine/report_block_stats_unittest.cc
new file mode 100644
index 0000000000..13b7af5ba2
--- /dev/null
+++ b/webrtc/video_engine/report_block_stats_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/video_engine/report_block_stats.h"
+
+namespace webrtc {
+
+// Fixture providing canned report blocks for two SSRCs plus vectors pairing
+// them up for single- and multi-SSRC aggregation tests.
+class ReportBlockStatsTest : public ::testing::Test {
+ protected:
+  ReportBlockStatsTest() : kSsrc1(0x12345), kSsrc2(0x23456) {}
+
+  void SetUp() override {
+    // kSsrc1: block 1-3.
+    block1_1_.cumulativeLost = 10;
+    block1_1_.fractionLost = 123;
+    block1_1_.extendedHighSeqNum = 24000;
+    block1_1_.jitter = 777;
+    block1_1_.sourceSSRC = kSsrc1;
+    block1_2_.cumulativeLost = 15;
+    block1_2_.fractionLost = 0;
+    block1_2_.extendedHighSeqNum = 24100;
+    block1_2_.jitter = 222;
+    block1_2_.sourceSSRC = kSsrc1;
+    block1_3_.cumulativeLost = 50;
+    block1_3_.fractionLost = 0;
+    block1_3_.extendedHighSeqNum = 24200;
+    block1_3_.jitter = 333;
+    block1_3_.sourceSSRC = kSsrc1;
+    // kSsrc2: block 1,2.
+    block2_1_.cumulativeLost = 111;
+    block2_1_.fractionLost = 222;
+    block2_1_.extendedHighSeqNum = 8500;
+    block2_1_.jitter = 555;
+    block2_1_.sourceSSRC = kSsrc2;
+    block2_2_.cumulativeLost = 136;
+    block2_2_.fractionLost = 0;
+    block2_2_.extendedHighSeqNum = 8800;
+    block2_2_.jitter = 888;
+    block2_2_.sourceSSRC = kSsrc2;
+
+    ssrc1block1_.push_back(block1_1_);
+    ssrc1block2_.push_back(block1_2_);
+    ssrc12block1_.push_back(block1_1_);
+    ssrc12block1_.push_back(block2_1_);
+    ssrc12block2_.push_back(block1_2_);
+    ssrc12block2_.push_back(block2_2_);
+  }
+
+  // Mirrors the field mapping ReportBlockStats::Store() performs, in reverse.
+  RtcpStatistics RtcpReportBlockToRtcpStatistics(
+      const RTCPReportBlock& stats) {
+    RtcpStatistics block;
+    block.cumulative_lost = stats.cumulativeLost;
+    block.fraction_lost = stats.fractionLost;
+    block.extended_max_sequence_number = stats.extendedHighSeqNum;
+    block.jitter = stats.jitter;
+    return block;
+  }
+
+  const uint32_t kSsrc1;
+  const uint32_t kSsrc2;
+  RTCPReportBlock block1_1_;
+  RTCPReportBlock block1_2_;
+  RTCPReportBlock block1_3_;
+  RTCPReportBlock block2_1_;
+  RTCPReportBlock block2_2_;
+  std::vector<RTCPReportBlock> ssrc1block1_;
+  std::vector<RTCPReportBlock> ssrc1block2_;
+  std::vector<RTCPReportBlock> ssrc12block1_;
+  std::vector<RTCPReportBlock> ssrc12block2_;
+};
+
+// An empty report-block vector aggregates to an all-zero block.
+TEST_F(ReportBlockStatsTest, AggregateAndStore_NoSsrc) {
+  ReportBlockStats stats;
+  std::vector<RTCPReportBlock> empty;
+  RTCPReportBlock aggregated = stats.AggregateAndStore(empty);
+  EXPECT_EQ(0U, aggregated.fractionLost);
+  EXPECT_EQ(0U, aggregated.cumulativeLost);
+  EXPECT_EQ(0U, aggregated.jitter);
+  EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+}
+
+// A single SSRC is returned verbatim (no aggregation), while the cumulative
+// fraction-lost tracking still advances between calls.
+TEST_F(ReportBlockStatsTest, AggregateAndStore_OneSsrc) {
+  ReportBlockStats stats;
+  RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc1block1_);
+  // One ssrc, no aggregation done.
+  EXPECT_EQ(123U, aggregated.fractionLost);
+  EXPECT_EQ(10U, aggregated.cumulativeLost);
+  EXPECT_EQ(777U, aggregated.jitter);
+  EXPECT_EQ(24000U, aggregated.extendedHighSeqNum);
+
+  aggregated = stats.AggregateAndStore(ssrc1block2_);
+  EXPECT_EQ(0U, aggregated.fractionLost);
+  EXPECT_EQ(15U, aggregated.cumulativeLost);
+  EXPECT_EQ(222U, aggregated.jitter);
+  EXPECT_EQ(24100U, aggregated.extendedHighSeqNum);
+
+  // fl: 100 * (15-10) / (24100-24000) = 5%
+  EXPECT_EQ(5, stats.FractionLostInPercent());
+}
+
+// Two SSRCs: loss is summed, jitter averaged, and fraction lost is computed
+// from the deltas since the previous call (hence 0 on the first call).
+TEST_F(ReportBlockStatsTest, AggregateAndStore_TwoSsrcs) {
+  ReportBlockStats stats;
+  RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc12block1_);
+  EXPECT_EQ(0U, aggregated.fractionLost);
+  EXPECT_EQ(10U + 111U, aggregated.cumulativeLost);
+  EXPECT_EQ((777U + 555U) / 2, aggregated.jitter);
+  EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+
+  aggregated = stats.AggregateAndStore(ssrc12block2_);
+  // fl: 255 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 19
+  EXPECT_EQ(19U, aggregated.fractionLost);
+  EXPECT_EQ(15U + 136U, aggregated.cumulativeLost);
+  EXPECT_EQ((222U + 888U) / 2, aggregated.jitter);
+  EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+
+  // fl: 100 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 7%
+  EXPECT_EQ(7, stats.FractionLostInPercent());
+}
+
+// Store() path: FractionLostInPercent() is -1 until a second block for the
+// SSRC provides a delta, then tracks total loss since the first block.
+TEST_F(ReportBlockStatsTest, StoreAndGetFractionLost) {
+  const uint32_t kRemoteSsrc = 1;
+  ReportBlockStats stats;
+  EXPECT_EQ(-1, stats.FractionLostInPercent());
+
+  // First block.
+  stats.Store(RtcpReportBlockToRtcpStatistics(block1_1_), kRemoteSsrc, kSsrc1);
+  EXPECT_EQ(-1, stats.FractionLostInPercent());
+  // fl: 100 * (15-10) / (24100-24000) = 5%
+  stats.Store(RtcpReportBlockToRtcpStatistics(block1_2_), kRemoteSsrc, kSsrc1);
+  EXPECT_EQ(5, stats.FractionLostInPercent());
+  // fl: 100 * (50-10) / (24200-24000) = 20%
+  stats.Store(RtcpReportBlockToRtcpStatistics(block1_3_), kRemoteSsrc, kSsrc1);
+  EXPECT_EQ(20, stats.FractionLostInPercent());
+}
+
+} // namespace webrtc
+
diff --git a/webrtc/video_engine/stream_synchronization.cc b/webrtc/video_engine/stream_synchronization.cc
new file mode 100644
index 0000000000..b78cfe8874
--- /dev/null
+++ b/webrtc/video_engine/stream_synchronization.cc
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/stream_synchronization.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+
+#include <algorithm>
+
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+static const int kMaxChangeMs = 80;
+static const int kMaxDeltaDelayMs = 10000;
+static const int kFilterLength = 4;
+// Minimum difference between audio and video to warrant a change.
+static const int kMinDeltaMs = 30;
+
+// Mutable per-channel sync state: the extra delay currently imposed on each
+// stream and the delays handed out on the previous ComputeDelays() call.
+struct ViESyncDelay {
+  ViESyncDelay() {
+    extra_video_delay_ms = 0;
+    last_video_delay_ms = 0;
+    extra_audio_delay_ms = 0;
+    last_audio_delay_ms = 0;
+    network_delay = 120;
+  }
+
+  int extra_video_delay_ms;
+  int last_video_delay_ms;
+  int extra_audio_delay_ms;
+  int last_audio_delay_ms;
+  // NOTE(review): only read for logging in this file; initialized to 120 but
+  // never updated here — confirm whether it is still needed.
+  int network_delay;
+};
+
+// |video_primary_ssrc| and |audio_channel_id| are only used for logging;
+// the sync state itself starts at zero target delay.
+StreamSynchronization::StreamSynchronization(uint32_t video_primary_ssrc,
+                                             int audio_channel_id)
+    : channel_delay_(new ViESyncDelay),
+      video_primary_ssrc_(video_primary_ssrc),
+      audio_channel_id_(audio_channel_id),
+      base_target_delay_ms_(0),
+      avg_diff_ms_(0) {
+}
+
+StreamSynchronization::~StreamSynchronization() {
+  delete channel_delay_;
+}
+
+// Maps each stream's latest RTP timestamp to NTP time via its RTCP SR history
+// and computes how many ms later video is rendered relative to audio
+// (positive => video behind audio). Fails when either stream has fewer than
+// two SR reports, the RTP->NTP mapping fails, or the result exceeds
+// +/-kMaxDeltaDelayMs.
+// NOTE(review): only video_last_capture_time_ms is checked for < 0; the audio
+// capture time is not — confirm whether that asymmetry is intentional.
+bool StreamSynchronization::ComputeRelativeDelay(
+    const Measurements& audio_measurement,
+    const Measurements& video_measurement,
+    int* relative_delay_ms) {
+  assert(relative_delay_ms);
+  if (audio_measurement.rtcp.size() < 2 || video_measurement.rtcp.size() < 2) {
+    // We need two RTCP SR reports per stream to do synchronization.
+    return false;
+  }
+  int64_t audio_last_capture_time_ms;
+  if (!RtpToNtpMs(audio_measurement.latest_timestamp,
+                  audio_measurement.rtcp,
+                  &audio_last_capture_time_ms)) {
+    return false;
+  }
+  int64_t video_last_capture_time_ms;
+  if (!RtpToNtpMs(video_measurement.latest_timestamp,
+                  video_measurement.rtcp,
+                  &video_last_capture_time_ms)) {
+    return false;
+  }
+  if (video_last_capture_time_ms < 0) {
+    return false;
+  }
+  // Positive diff means that video_measurement is behind audio_measurement.
+  *relative_delay_ms = video_measurement.latest_receive_time_ms -
+      audio_measurement.latest_receive_time_ms -
+      (video_last_capture_time_ms - audio_last_capture_time_ms);
+  if (*relative_delay_ms > kMaxDeltaDelayMs ||
+      *relative_delay_ms < -kMaxDeltaDelayMs) {
+    return false;
+  }
+  return true;
+}
+
+// Computes new audio/video delay targets that move the streams toward sync.
+// The A/V difference is low-pass filtered (kFilterLength) and only acted on
+// when it exceeds kMinDeltaMs; each step is clamped to +/-kMaxChangeMs and
+// adjusts only one stream at a time (reduce extra delay on the lagging side
+// first, then add delay to the other). Returns false when no adjustment was
+// made; on true, the new targets are written to the out parameters.
+bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
+                                          int current_audio_delay_ms,
+                                          int* total_audio_delay_target_ms,
+                                          int* total_video_delay_target_ms) {
+  assert(total_audio_delay_target_ms && total_video_delay_target_ms);
+
+  int current_video_delay_ms = *total_video_delay_target_ms;
+  LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
+                  << ", network delay diff: " << channel_delay_->network_delay
+                  << " current diff: " << relative_delay_ms
+                  << " for channel " << audio_channel_id_;
+  // Calculate the difference between the lowest possible video delay and
+  // the current audio delay.
+  int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
+      relative_delay_ms;
+
+  // Exponential-style smoothing over the last kFilterLength observations.
+  avg_diff_ms_ = ((kFilterLength - 1) * avg_diff_ms_ +
+      current_diff_ms) / kFilterLength;
+  if (abs(avg_diff_ms_) < kMinDeltaMs) {
+    // Don't adjust if the diff is within our margin.
+    return false;
+  }
+
+  // Make sure we don't move too fast.
+  int diff_ms = avg_diff_ms_ / 2;
+  diff_ms = std::min(diff_ms, kMaxChangeMs);
+  diff_ms = std::max(diff_ms, -kMaxChangeMs);
+
+  // Reset the average after a move to prevent overshooting reaction.
+  avg_diff_ms_ = 0;
+
+  if (diff_ms > 0) {
+    // The minimum video delay is longer than the current audio delay.
+    // We need to decrease extra video delay, or add extra audio delay.
+    if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+      // We have extra delay added to ViE. Reduce this delay before adding
+      // extra delay to VoE.
+      channel_delay_->extra_video_delay_ms -= diff_ms;
+      channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+    } else {  // extra_video_delay_ms <= base_target_delay_ms_
+      // We have no extra video delay to remove, increase the audio delay.
+      channel_delay_->extra_audio_delay_ms += diff_ms;
+      channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+    }
+  } else {  // if (diff_ms > 0)
+    // The video delay is lower than the current audio delay.
+    // We need to decrease extra audio delay, or add extra video delay.
+    if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+      // We have extra delay in VoiceEngine.
+      // Start with decreasing the voice delay.
+      // Note: diff_ms is negative; add the negative difference.
+      channel_delay_->extra_audio_delay_ms += diff_ms;
+      channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+    } else {  // extra_audio_delay_ms <= base_target_delay_ms_
+      // We have no extra delay in VoiceEngine, increase the video delay.
+      // Note: diff_ms is negative; subtract the negative difference.
+      channel_delay_->extra_video_delay_ms -= diff_ms;  // X - (-Y) = X + Y.
+      channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+    }
+  }
+
+  // Make sure that video is never below our target.
+  channel_delay_->extra_video_delay_ms = std::max(
+      channel_delay_->extra_video_delay_ms, base_target_delay_ms_);
+
+  int new_video_delay_ms;
+  if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+    new_video_delay_ms = channel_delay_->extra_video_delay_ms;
+  } else {
+    // No change to the extra video delay. We are changing audio and we only
+    // allow to change one at the time.
+    new_video_delay_ms = channel_delay_->last_video_delay_ms;
+  }
+
+  // Make sure that we don't go below the extra video delay.
+  new_video_delay_ms = std::max(
+      new_video_delay_ms, channel_delay_->extra_video_delay_ms);
+
+  // Verify we don't go above the maximum allowed video delay.
+  new_video_delay_ms =
+      std::min(new_video_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
+
+  int new_audio_delay_ms;
+  if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+    new_audio_delay_ms = channel_delay_->extra_audio_delay_ms;
+  } else {
+    // No change to the audio delay. We are changing video and we only
+    // allow to change one at the time.
+    new_audio_delay_ms = channel_delay_->last_audio_delay_ms;
+  }
+
+  // Make sure that we don't go below the extra audio delay.
+  new_audio_delay_ms = std::max(
+      new_audio_delay_ms, channel_delay_->extra_audio_delay_ms);
+
+  // Verify we don't go above the maximum allowed audio delay.
+  new_audio_delay_ms =
+      std::min(new_audio_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
+
+  // Remember our last audio and video delays.
+  channel_delay_->last_video_delay_ms = new_video_delay_ms;
+  channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
+
+  LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
+                  << " for video primary SSRC " << video_primary_ssrc_
+                  << " and audio delay " << channel_delay_->extra_audio_delay_ms
+                  << " for audio channel " << audio_channel_id_;
+
+  // Return values.
+  *total_video_delay_target_ms = new_video_delay_ms;
+  *total_audio_delay_target_ms = new_audio_delay_ms;
+  return true;
+}
+
+// Raises (or lowers) the common delay floor for both streams. All stored
+// delays are shifted by the change relative to the previous base target so
+// existing extra sync delay on top of the old base is preserved.
+void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) {
+  // Initial extra delay for audio (accounting for existing extra delay).
+  channel_delay_->extra_audio_delay_ms +=
+      target_delay_ms - base_target_delay_ms_;
+  channel_delay_->last_audio_delay_ms +=
+      target_delay_ms - base_target_delay_ms_;
+
+  // The video delay is compared to the last value (and how much we can update
+  // is limited by that as well).
+  channel_delay_->last_video_delay_ms +=
+      target_delay_ms - base_target_delay_ms_;
+
+  channel_delay_->extra_video_delay_ms +=
+      target_delay_ms - base_target_delay_ms_;
+
+  // Video is already delayed by the desired amount.
+  base_target_delay_ms_ = target_delay_ms;
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/stream_synchronization.h b/webrtc/video_engine/stream_synchronization.h
new file mode 100644
index 0000000000..1209062f9b
--- /dev/null
+++ b/webrtc/video_engine/stream_synchronization.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
+#define WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
+
+#include <list>
+
+#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct ViESyncDelay;
+
+class StreamSynchronization {
+ public:
+ struct Measurements {
+ Measurements() : rtcp(), latest_receive_time_ms(0), latest_timestamp(0) {}
+ RtcpList rtcp;
+ int64_t latest_receive_time_ms;
+ uint32_t latest_timestamp;
+ };
+
+ StreamSynchronization(uint32_t video_primary_ssrc, int audio_channel_id);
+ ~StreamSynchronization();
+
+ bool ComputeDelays(int relative_delay_ms,
+ int current_audio_delay_ms,
+ int* extra_audio_delay_ms,
+ int* total_video_delay_target_ms);
+
+  // On success, |relative_delay| contains the number of milliseconds by which
+  // video is rendered later than audio. If audio is played back later than
+  // video, |relative_delay| will be negative.
+ static bool ComputeRelativeDelay(const Measurements& audio_measurement,
+ const Measurements& video_measurement,
+ int* relative_delay_ms);
+ // Set target buffering delay - All audio and video will be delayed by at
+ // least target_delay_ms.
+ void SetTargetBufferingDelay(int target_delay_ms);
+
+ private:
+ ViESyncDelay* channel_delay_;
+ const uint32_t video_primary_ssrc_;
+ const int audio_channel_id_;
+ int base_target_delay_ms_;
+ int avg_diff_ms_;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
diff --git a/webrtc/video_engine/stream_synchronization_unittest.cc b/webrtc/video_engine/stream_synchronization_unittest.cc
new file mode 100644
index 0000000000..7136f1e1c7
--- /dev/null
+++ b/webrtc/video_engine/stream_synchronization_unittest.cc
@@ -0,0 +1,562 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <math.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/video_engine/stream_synchronization.h"
+
+namespace webrtc {
+
+// These correspond to the same constants defined in vie_sync_module.cc.
+enum { kMaxVideoDiffMs = 80 };
+enum { kMaxAudioDiffMs = 80 };
+enum { kMaxDelay = 1500 };
+
+// Test constants.
+enum { kDefaultAudioFrequency = 8000 };
+enum { kDefaultVideoFrequency = 90000 };
+const double kNtpFracPerMs = 4.294967296E6;
+static const int kSmoothingFilter = 4 * 2;
+
+class Time {
+ public:
+ explicit Time(int64_t offset)
+ : kNtpJan1970(2208988800UL),
+ time_now_ms_(offset) {}
+
+ RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const {
+ RtcpMeasurement rtcp;
+ NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac);
+ rtcp.rtp_timestamp = NowRtp(frequency, offset);
+ return rtcp;
+ }
+
+ void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const {
+ *ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
+ int64_t remainder_ms = time_now_ms_ % 1000;
+ *ntp_frac = static_cast<uint32_t>(
+ static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
+ }
+
+ uint32_t NowRtp(int frequency, uint32_t offset) const {
+ return frequency * time_now_ms_ / 1000 + offset;
+ }
+
+ void IncreaseTimeMs(int64_t inc) {
+ time_now_ms_ += inc;
+ }
+
+ int64_t time_now_ms() const {
+ return time_now_ms_;
+ }
+
+ private:
+ // January 1970, in NTP seconds.
+ const uint32_t kNtpJan1970;
+ int64_t time_now_ms_;
+};
+
+class StreamSynchronizationTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ sync_ = new StreamSynchronization(0, 0);
+ send_time_ = new Time(kSendTimeOffsetMs);
+ receive_time_ = new Time(kReceiveTimeOffsetMs);
+ audio_clock_drift_ = 1.0;
+ video_clock_drift_ = 1.0;
+ }
+
+ virtual void TearDown() {
+ delete sync_;
+ delete send_time_;
+ delete receive_time_;
+ }
+
+ // Generates the necessary RTCP measurements and RTP timestamps and computes
+ // the audio and video delays needed to get the two streams in sync.
+  // |audio_delay_ms| and |video_delay_ms| are the number of milliseconds after
+  // capture at which the frames are rendered.
+ // |current_audio_delay_ms| is the number of milliseconds which audio is
+ // currently being delayed by the receiver.
+ bool DelayedStreams(int audio_delay_ms,
+ int video_delay_ms,
+ int current_audio_delay_ms,
+ int* extra_audio_delay_ms,
+ int* total_video_delay_ms) {
+ int audio_frequency = static_cast<int>(kDefaultAudioFrequency *
+ audio_clock_drift_ + 0.5);
+ int audio_offset = 0;
+ int video_frequency = static_cast<int>(kDefaultVideoFrequency *
+ video_clock_drift_ + 0.5);
+ int video_offset = 0;
+ StreamSynchronization::Measurements audio;
+ StreamSynchronization::Measurements video;
+ // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
+ audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
+ audio_offset));
+ send_time_->IncreaseTimeMs(100);
+ receive_time_->IncreaseTimeMs(100);
+ video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
+ video_offset));
+ send_time_->IncreaseTimeMs(900);
+ receive_time_->IncreaseTimeMs(900);
+ audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
+ audio_offset));
+ send_time_->IncreaseTimeMs(100);
+ receive_time_->IncreaseTimeMs(100);
+ video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
+ video_offset));
+ send_time_->IncreaseTimeMs(900);
+ receive_time_->IncreaseTimeMs(900);
+
+ // Capture an audio and a video frame at the same time.
+ audio.latest_timestamp = send_time_->NowRtp(audio_frequency,
+ audio_offset);
+ video.latest_timestamp = send_time_->NowRtp(video_frequency,
+ video_offset);
+
+ if (audio_delay_ms > video_delay_ms) {
+ // Audio later than video.
+ receive_time_->IncreaseTimeMs(video_delay_ms);
+ video.latest_receive_time_ms = receive_time_->time_now_ms();
+ receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms);
+ audio.latest_receive_time_ms = receive_time_->time_now_ms();
+ } else {
+ // Video later than audio.
+ receive_time_->IncreaseTimeMs(audio_delay_ms);
+ audio.latest_receive_time_ms = receive_time_->time_now_ms();
+ receive_time_->IncreaseTimeMs(video_delay_ms - audio_delay_ms);
+ video.latest_receive_time_ms = receive_time_->time_now_ms();
+ }
+ int relative_delay_ms;
+ StreamSynchronization::ComputeRelativeDelay(audio, video,
+ &relative_delay_ms);
+ EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms);
+ return sync_->ComputeDelays(relative_delay_ms,
+ current_audio_delay_ms,
+ extra_audio_delay_ms,
+ total_video_delay_ms);
+ }
+
+ // Simulate audio playback 300 ms after capture and video rendering 100 ms
+ // after capture. Verify that the correct extra delays are calculated for
+ // audio and video, and that they change correctly when we simulate that
+ // NetEQ or the VCM adds more delay to the streams.
+ // TODO(holmer): This is currently wrong! We should simply change
+ // audio_delay_ms or video_delay_ms since those now include VCM and NetEQ
+ // delays.
+ void BothDelayedAudioLaterTest(int base_target_delay) {
+ int current_audio_delay_ms = base_target_delay;
+ int audio_delay_ms = base_target_delay + 300;
+ int video_delay_ms = base_target_delay + 100;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay;
+ int filtered_move = (audio_delay_ms - video_delay_ms) / kSmoothingFilter;
+ const int kNeteqDelayIncrease = 50;
+ const int kNeteqDelayDecrease = 10;
+
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + 2 * filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + 3 * filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+
+ // Simulate that NetEQ introduces some audio delay.
+ current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ filtered_move = 3 * filtered_move +
+ (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) /
+ kSmoothingFilter;
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+
+ // Simulate that NetEQ reduces its delay.
+ current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate base_target_delay minimum delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+
+ filtered_move = filtered_move +
+ (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) /
+ kSmoothingFilter;
+
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ }
+
+ void BothDelayedVideoLaterTest(int base_target_delay) {
+ int current_audio_delay_ms = base_target_delay;
+ int audio_delay_ms = base_target_delay + 100;
+ int video_delay_ms = base_target_delay + 300;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay;
+
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than this in 1 second.
+ EXPECT_GE(base_target_delay + kMaxAudioDiffMs, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ int current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than the half of the
+ // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than the half of the
+ // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason reduced the delay.
+ current_audio_delay_ms = base_target_delay + 10;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // Since we only can ask NetEQ for a certain amount of extra delay, and
+ // we only measure the total NetEQ delay, we will ask for additional delay
+ // here to try to stay in sync.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason significantly increased the delay.
+ current_audio_delay_ms = base_target_delay + 350;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than the half of the
+ // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ }
+
+ int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
+ return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
+ static_cast<int>(kMaxAudioDiffMs));
+ }
+
+ int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
+ return std::max((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
+ -kMaxAudioDiffMs);
+ }
+
+ enum { kSendTimeOffsetMs = 98765 };
+ enum { kReceiveTimeOffsetMs = 43210 };
+
+ StreamSynchronization* sync_;
+ Time* send_time_; // The simulated clock at the sender.
+ Time* receive_time_; // The simulated clock at the receiver.
+ double audio_clock_drift_;
+ double video_clock_drift_;
+};
+
+TEST_F(StreamSynchronizationTest, NoDelay) {
+ uint32_t current_audio_delay_ms = 0;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_FALSE(DelayedStreams(0, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ EXPECT_EQ(0, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, VideoDelay) {
+ uint32_t current_audio_delay_ms = 0;
+ int delay_ms = 200;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ // The video delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(delay_ms / kSmoothingFilter, total_video_delay_ms);
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ // Simulate 0 minimum delay in the VCM.
+ total_video_delay_ms = 0;
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ // The video delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(2 * delay_ms / kSmoothingFilter, total_video_delay_ms);
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ // Simulate 0 minimum delay in the VCM.
+ total_video_delay_ms = 0;
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ EXPECT_EQ(3 * delay_ms / kSmoothingFilter, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, AudioDelay) {
+ int current_audio_delay_ms = 0;
+ int delay_ms = 200;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(delay_ms / kSmoothingFilter, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ int current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than the half of the required
+ // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than the half of the required
+ // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason reduced the delay.
+ current_audio_delay_ms = 10;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // Since we only can ask NetEQ for a certain amount of extra delay, and
+ // we only measure the total NetEQ delay, we will ask for additional delay
+  // here to try to stay in sync.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason significantly increased the delay.
+ current_audio_delay_ms = 350;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than the half of the required
+ // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterAudioClockDrift) {
+ audio_clock_drift_ = 1.05;
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterVideoClockDrift) {
+ video_clock_drift_ = 1.05;
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDrift) {
+ audio_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDrift) {
+ video_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BaseDelay) {
+ int base_target_delay_ms = 2000;
+ int current_audio_delay_ms = 2000;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ // We are in sync don't change.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ // Triggering another call with the same values. Delay should not be modified.
+ base_target_delay_ms = 2000;
+ current_audio_delay_ms = base_target_delay_ms;
+ total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ // We are in sync don't change.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ // Changing delay value - intended to test this module only. In practice it
+ // would take VoE time to adapt.
+ base_target_delay_ms = 5000;
+ current_audio_delay_ms = base_target_delay_ms;
+ total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ // We are in sync don't change.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ audio_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ video_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest,
+ BothDelayedVideoLaterAudioClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ audio_clock_drift_ = 1.05;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest,
+ BothDelayedVideoLaterVideoClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ video_clock_drift_ = 1.05;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/video_engine_core_unittests.gyp b/webrtc/video_engine/video_engine_core_unittests.gyp
new file mode 100644
index 0000000000..d0143442b4
--- /dev/null
+++ b/webrtc/video_engine/video_engine_core_unittests.gyp
@@ -0,0 +1,74 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': [
+ '../build/common.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'video_engine_core_unittests',
+ 'type': '<(gtest_target_type)',
+ 'dependencies': [
+ '<(webrtc_root)/webrtc.gyp:webrtc',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
+ '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'call_stats_unittest.cc',
+ 'encoder_state_feedback_unittest.cc',
+ 'overuse_frame_detector_unittest.cc',
+ 'payload_router_unittest.cc',
+ 'report_block_stats_unittest.cc',
+ 'stream_synchronization_unittest.cc',
+ 'vie_codec_unittest.cc',
+ 'vie_remb_unittest.cc',
+ ],
+ 'conditions': [
+ ['OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
+ ],
+ },
+ ], # targets
+ 'conditions': [
+ ['OS=="android"', {
+ 'targets': [
+ {
+ 'target_name': 'video_engine_core_unittests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):video_engine_core_unittests_apk',
+ ],
+ },
+ ],
+ }],
+ ['test_isolation_mode != "noop"', {
+ 'targets': [
+ {
+ 'target_name': 'video_engine_core_unittests_run',
+ 'type': 'none',
+ 'dependencies': [
+ 'video_engine_core_unittests',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'video_engine_core_unittests.isolate',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/webrtc/video_engine/video_engine_core_unittests.isolate b/webrtc/video_engine/video_engine_core_unittests.isolate
new file mode 100644
index 0000000000..c8d2fc9026
--- /dev/null
+++ b/webrtc/video_engine/video_engine_core_unittests.isolate
@@ -0,0 +1,23 @@
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'conditions': [
+ ['OS=="linux" or OS=="mac" or OS=="win"', {
+ 'variables': {
+ 'command': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/video_engine_core_unittests<(EXECUTABLE_SUFFIX)',
+ ],
+ 'files': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/video_engine_core_unittests<(EXECUTABLE_SUFFIX)',
+ ],
+ },
+ }],
+ ],
+}
diff --git a/webrtc/video_engine/vie_channel.cc b/webrtc/video_engine/vie_channel.cc
new file mode 100644
index 0000000000..147ecb1456
--- /dev/null
+++ b/webrtc/video_engine/vie_channel.cc
@@ -0,0 +1,1253 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/vie_channel.h"
+
+#include <algorithm>
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/interface/incoming_video_stream.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video_engine/call_stats.h"
+#include "webrtc/video_engine/payload_router.h"
+#include "webrtc/video_engine/report_block_stats.h"
+#include "webrtc/video_engine/vie_defines.h"
+
+namespace webrtc {
+
+const int kMaxDecodeWaitTimeMs = 50;
+static const int kMaxTargetDelayMs = 10000;
+static const float kMaxIncompleteTimeMultiplier = 3.5f;
+
+// Helper class receiving statistics callbacks.
+class ChannelStatsObserver : public CallStatsObserver {
+ public:
+ explicit ChannelStatsObserver(ViEChannel* owner) : owner_(owner) {}
+ virtual ~ChannelStatsObserver() {}
+
+ // Implements StatsObserver.
+ virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
+ owner_->OnRttUpdate(avg_rtt_ms, max_rtt_ms);
+ }
+
+ private:
+ ViEChannel* const owner_;
+};
+
+class ViEChannelProtectionCallback : public VCMProtectionCallback {
+ public:
+ ViEChannelProtectionCallback(ViEChannel* owner) : owner_(owner) {}
+ ~ViEChannelProtectionCallback() {}
+
+
+ int ProtectionRequest(
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) override {
+ return owner_->ProtectionRequest(delta_fec_params, key_fec_params,
+ sent_video_rate_bps, sent_nack_rate_bps,
+ sent_fec_rate_bps);
+ }
+ private:
+ ViEChannel* owner_;
+};
+
+ViEChannel::ViEChannel(uint32_t number_of_cores,
+ Transport* transport,
+ ProcessThread* module_process_thread,
+ RtcpIntraFrameObserver* intra_frame_observer,
+ RtcpBandwidthObserver* bandwidth_observer,
+ TransportFeedbackObserver* transport_feedback_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ size_t max_rtp_streams,
+ bool sender)
+ : number_of_cores_(number_of_cores),
+ sender_(sender),
+ module_process_thread_(module_process_thread),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ send_payload_router_(new PayloadRouter()),
+ vcm_protection_callback_(new ViEChannelProtectionCallback(this)),
+ vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
+ nullptr,
+ nullptr)),
+ vie_receiver_(vcm_, remote_bitrate_estimator, this),
+ vie_sync_(vcm_),
+ stats_observer_(new ChannelStatsObserver(this)),
+ receive_stats_callback_(nullptr),
+ incoming_video_stream_(nullptr),
+ intra_frame_observer_(intra_frame_observer),
+ rtt_stats_(rtt_stats),
+ paced_sender_(paced_sender),
+ packet_router_(packet_router),
+ bandwidth_observer_(bandwidth_observer),
+ transport_feedback_observer_(transport_feedback_observer),
+ nack_history_size_sender_(kSendSidePacketHistorySize),
+ max_nack_reordering_threshold_(kMaxPacketAgeToNack),
+ pre_render_callback_(NULL),
+ report_block_stats_sender_(new ReportBlockStats()),
+ time_of_first_rtt_ms_(-1),
+ rtt_sum_ms_(0),
+ last_rtt_ms_(0),
+ num_rtts_(0),
+ rtp_rtcp_modules_(
+ CreateRtpRtcpModules(!sender,
+ vie_receiver_.GetReceiveStatistics(),
+ transport,
+ sender ? intra_frame_observer_ : nullptr,
+ sender ? bandwidth_observer_.get() : nullptr,
+ transport_feedback_observer_,
+ rtt_stats_,
+ &rtcp_packet_type_counter_observer_,
+ remote_bitrate_estimator,
+ paced_sender_,
+ packet_router_,
+ &send_bitrate_observer_,
+ &send_frame_count_observer_,
+ &send_side_delay_observer_,
+ max_rtp_streams)),
+ num_active_rtp_rtcp_modules_(1) {
+ vie_receiver_.SetRtpRtcpModule(rtp_rtcp_modules_[0]);
+ vcm_->SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
+}
+
+int32_t ViEChannel::Init() {
+ module_process_thread_->RegisterModule(vie_receiver_.GetReceiveStatistics());
+
+ // RTP/RTCP initialization.
+ module_process_thread_->RegisterModule(rtp_rtcp_modules_[0]);
+
+ rtp_rtcp_modules_[0]->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);
+ if (paced_sender_) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+ }
+ packet_router_->AddRtpModule(rtp_rtcp_modules_[0]);
+ if (sender_) {
+ std::list<RtpRtcp*> send_rtp_modules(1, rtp_rtcp_modules_[0]);
+ send_payload_router_->SetSendingRtpModules(send_rtp_modules);
+ RTC_DCHECK(!send_payload_router_->active());
+ }
+ if (vcm_->RegisterReceiveCallback(this) != 0) {
+ return -1;
+ }
+ vcm_->RegisterFrameTypeCallback(this);
+ vcm_->RegisterReceiveStatisticsCallback(this);
+ vcm_->RegisterDecoderTimingCallback(this);
+ vcm_->SetRenderDelay(kViEDefaultRenderDelayMs);
+
+ module_process_thread_->RegisterModule(vcm_);
+ module_process_thread_->RegisterModule(&vie_sync_);
+
+ return 0;
+}
+
+ViEChannel::~ViEChannel() {
+ UpdateHistograms();
+ // Make sure we don't get more callbacks from the RTP module.
+ module_process_thread_->DeRegisterModule(
+ vie_receiver_.GetReceiveStatistics());
+ module_process_thread_->DeRegisterModule(vcm_);
+ module_process_thread_->DeRegisterModule(&vie_sync_);
+ send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
+ for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i)
+ packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ module_process_thread_->DeRegisterModule(rtp_rtcp);
+ delete rtp_rtcp;
+ }
+ if (decode_thread_) {
+ StopDecodeThread();
+ }
+ // Release modules.
+ VideoCodingModule::Destroy(vcm_);
+}
+
+void ViEChannel::UpdateHistograms() {
+ int64_t now = Clock::GetRealTimeClock()->TimeInMilliseconds();
+
+ {
+ CriticalSectionScoped cs(crit_.get());
+ int64_t elapsed_sec = (now - time_of_first_rtt_ms_) / 1000;
+ if (time_of_first_rtt_ms_ != -1 && num_rtts_ > 0 &&
+ elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ int64_t avg_rtt_ms = (rtt_sum_ms_ + num_rtts_ / 2) / num_rtts_;
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.AverageRoundTripTimeInMilliseconds", avg_rtt_ms);
+ }
+ }
+
+ if (sender_) {
+ RtcpPacketTypeCounter rtcp_counter;
+ GetSendRtcpPacketTypeCounter(&rtcp_counter);
+ int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsReceivedPerMinute",
+ rtcp_counter.nack_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsReceivedPerMinute",
+ rtcp_counter.fir_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsReceivedPerMinute",
+ rtcp_counter.pli_packets * 60 / elapsed_sec);
+ if (rtcp_counter.nack_requests > 0) {
+ RTC_HISTOGRAM_PERCENTAGE(
+ "WebRTC.Video.UniqueNackRequestsReceivedInPercent",
+ rtcp_counter.UniqueNackRequestsInPercent());
+ }
+ int fraction_lost = report_block_stats_sender_->FractionLostInPercent();
+ if (fraction_lost != -1) {
+ RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.SentPacketsLostInPercent",
+ fraction_lost);
+ }
+ }
+
+ StreamDataCounters rtp;
+ StreamDataCounters rtx;
+ GetSendStreamDataCounters(&rtp, &rtx);
+ StreamDataCounters rtp_rtx = rtp;
+ rtp_rtx.Add(rtx);
+ elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(
+ Clock::GetRealTimeClock()->TimeInMilliseconds()) /
+ 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_100000(
+ "WebRTC.Video.BitrateSentInKbps",
+ static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.MediaBitrateSentInKbps",
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.PaddingBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.RetransmittedBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
+ elapsed_sec / 1000));
+ if (rtp_rtcp_modules_[0]->RtxSendStatus() != kRtxOff) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.RtxBitrateSentInKbps",
+ static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ bool fec_enabled = false;
+ uint8_t pltype_red;
+ uint8_t pltype_fec;
+ rtp_rtcp_modules_[0]->GenericFECStatus(fec_enabled, pltype_red,
+ pltype_fec);
+ if (fec_enabled) {
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FecBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.fec.TotalBytes() *
+ 8 / elapsed_sec / 1000));
+ }
+ }
+ } else if (vie_receiver_.GetRemoteSsrc() > 0) {
+ // Get receive stats if we are receiving packets, i.e. there is a remote
+ // ssrc.
+ RtcpPacketTypeCounter rtcp_counter;
+ GetReceiveRtcpPacketTypeCounter(&rtcp_counter);
+ int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute",
+ rtcp_counter.nack_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute",
+ rtcp_counter.fir_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute",
+ rtcp_counter.pli_packets * 60 / elapsed_sec);
+ if (rtcp_counter.nack_requests > 0) {
+ RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent",
+ rtcp_counter.UniqueNackRequestsInPercent());
+ }
+ }
+
+ StreamDataCounters rtp;
+ StreamDataCounters rtx;
+ GetReceiveStreamDataCounters(&rtp, &rtx);
+ StreamDataCounters rtp_rtx = rtp;
+ rtp_rtx.Add(rtx);
+ elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.BitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.MediaBitrateReceivedInKbps",
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.PaddingBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
+ elapsed_sec / 1000));
+ uint32_t ssrc = 0;
+ if (vie_receiver_.GetRtxSsrc(&ssrc)) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.RtxBitrateReceivedInKbps",
+ static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ if (vie_receiver_.IsFecEnabled()) {
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FecBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.fec.TotalBytes() *
+ 8 / elapsed_sec / 1000));
+ }
+ }
+ }
+}
+
+int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
+ bool new_stream) {
+ RTC_DCHECK(sender_);
+ if (video_codec.codecType == kVideoCodecRED ||
+ video_codec.codecType == kVideoCodecULPFEC) {
+ LOG_F(LS_ERROR) << "Not a valid send codec " << video_codec.codecType;
+ return -1;
+ }
+ if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
+ LOG_F(LS_ERROR) << "Incorrect config "
+ << video_codec.numberOfSimulcastStreams;
+ return -1;
+ }
+ // Update the RTP module with the settings.
+ // Stop and Start the RTP module -> trigger new SSRC, if an SSRC hasn't been
+ // set explicitly.
+ // The first layer is always active, so the first module can be checked for
+ // sending status.
+ bool is_sending = rtp_rtcp_modules_[0]->Sending();
+ bool router_was_active = send_payload_router_->active();
+ send_payload_router_->set_active(false);
+ send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
+
+ std::vector<RtpRtcp*> registered_modules;
+ std::vector<RtpRtcp*> deregistered_modules;
+ size_t num_active_modules = video_codec.numberOfSimulcastStreams > 0
+ ? video_codec.numberOfSimulcastStreams
+ : 1;
+ size_t num_prev_active_modules;
+ {
+ // Cache which modules are active so StartSend can know which ones to start.
+ CriticalSectionScoped cs(crit_.get());
+ num_prev_active_modules = num_active_rtp_rtcp_modules_;
+ num_active_rtp_rtcp_modules_ = num_active_modules;
+ }
+ for (size_t i = 0; i < num_active_modules; ++i)
+ registered_modules.push_back(rtp_rtcp_modules_[i]);
+
+ for (size_t i = num_active_modules; i < rtp_rtcp_modules_.size(); ++i)
+ deregistered_modules.push_back(rtp_rtcp_modules_[i]);
+
+ // Disable inactive modules.
+ for (RtpRtcp* rtp_rtcp : deregistered_modules) {
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ }
+
+ // Configure active modules.
+ for (RtpRtcp* rtp_rtcp : registered_modules) {
+ rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
+ if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
+ return -1;
+ }
+ rtp_rtcp->SetSendingStatus(is_sending);
+ rtp_rtcp->SetSendingMediaStatus(is_sending);
+ }
+
+  // |RegisterRtpRtcpModules| resets all old weak pointers and old
+ // modules can be deleted after this step.
+ vie_receiver_.RegisterRtpRtcpModules(registered_modules);
+
+ // Update the packet and payload routers with the sending RtpRtcp modules.
+ if (sender_) {
+ std::list<RtpRtcp*> active_send_modules;
+ for (RtpRtcp* rtp_rtcp : registered_modules)
+ active_send_modules.push_back(rtp_rtcp);
+ send_payload_router_->SetSendingRtpModules(active_send_modules);
+ }
+
+ if (router_was_active)
+ send_payload_router_->set_active(true);
+
+ // Deregister previously registered modules.
+ for (size_t i = num_active_modules; i < num_prev_active_modules; ++i) {
+ module_process_thread_->DeRegisterModule(rtp_rtcp_modules_[i]);
+ packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
+ }
+ // Register new active modules.
+ for (size_t i = num_prev_active_modules; i < num_active_modules; ++i) {
+ module_process_thread_->RegisterModule(rtp_rtcp_modules_[i]);
+ packet_router_->AddRtpModule(rtp_rtcp_modules_[i]);
+ }
+ return 0;
+}
+
+int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
+ RTC_DCHECK(!sender_);
+ if (!vie_receiver_.SetReceiveCodec(video_codec)) {
+ return -1;
+ }
+
+ if (video_codec.codecType != kVideoCodecRED &&
+ video_codec.codecType != kVideoCodecULPFEC) {
+ // Register codec type with VCM, but do not register RED or ULPFEC.
+ if (vcm_->RegisterReceiveCodec(&video_codec, number_of_cores_, false) !=
+ VCM_OK) {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+
+int32_t ViEChannel::RegisterExternalDecoder(const uint8_t pl_type,
+ VideoDecoder* decoder,
+ bool buffered_rendering,
+ int32_t render_delay) {
+ RTC_DCHECK(!sender_);
+ int32_t result;
+ result = vcm_->RegisterExternalDecoder(decoder, pl_type, buffered_rendering);
+ if (result != VCM_OK) {
+ return result;
+ }
+ return vcm_->SetRenderDelay(render_delay);
+}
+
+int32_t ViEChannel::DeRegisterExternalDecoder(const uint8_t pl_type) {
+ RTC_DCHECK(!sender_);
+ VideoCodec current_receive_codec;
+ int32_t result = 0;
+ result = vcm_->ReceiveCodec(&current_receive_codec);
+ if (vcm_->RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
+ return -1;
+ }
+
+ if (result == 0 && current_receive_codec.plType == pl_type) {
+ result = vcm_->RegisterReceiveCodec(&current_receive_codec,
+ number_of_cores_, false);
+ }
+ return result;
+}
+
+int32_t ViEChannel::ReceiveCodecStatistics(uint32_t* num_key_frames,
+ uint32_t* num_delta_frames) {
+ CriticalSectionScoped cs(crit_.get());
+ *num_key_frames = receive_frame_counts_.key_frames;
+ *num_delta_frames = receive_frame_counts_.delta_frames;
+ return 0;
+}
+
+uint32_t ViEChannel::DiscardedPackets() const {
+ return vcm_->DiscardedPackets();
+}
+
+int ViEChannel::ReceiveDelay() const {
+ return vcm_->Delay();
+}
+
+void ViEChannel::SetRTCPMode(const RtcpMode rtcp_mode) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRTCPStatus(rtcp_mode);
+}
+
+void ViEChannel::SetProtectionMode(bool enable_nack,
+ bool enable_fec,
+ int payload_type_red,
+ int payload_type_fec) {
+ // Validate payload types.
+ if (enable_fec) {
+ RTC_DCHECK_GE(payload_type_red, 0);
+ RTC_DCHECK_GE(payload_type_fec, 0);
+ RTC_DCHECK_LE(payload_type_red, 127);
+ RTC_DCHECK_LE(payload_type_fec, 127);
+ } else {
+ RTC_DCHECK_EQ(payload_type_red, -1);
+ RTC_DCHECK_EQ(payload_type_fec, -1);
+ // Set to valid uint8_ts to be castable later without signed overflows.
+ payload_type_red = 0;
+ payload_type_fec = 0;
+ }
+
+ VCMVideoProtection protection_method;
+ if (enable_nack) {
+ protection_method = enable_fec ? kProtectionNackFEC : kProtectionNack;
+ } else {
+ protection_method = kProtectionNone;
+ }
+
+ vcm_->SetVideoProtection(protection_method, true);
+
+ // Set NACK.
+ ProcessNACKRequest(enable_nack);
+
+ // Set FEC.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetGenericFECStatus(enable_fec,
+ static_cast<uint8_t>(payload_type_red),
+ static_cast<uint8_t>(payload_type_fec));
+ }
+}
+
+void ViEChannel::ProcessNACKRequest(const bool enable) {
+ if (enable) {
+ // Turn on NACK.
+ if (rtp_rtcp_modules_[0]->RTCP() == RtcpMode::kOff)
+ return;
+ vie_receiver_.SetNackStatus(true, max_nack_reordering_threshold_);
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+
+ vcm_->RegisterPacketRequestCallback(this);
+ // Don't introduce errors when NACK is enabled.
+ vcm_->SetDecodeErrorMode(kNoErrors);
+ } else {
+ vcm_->RegisterPacketRequestCallback(NULL);
+ if (paced_sender_ == nullptr) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(false, 0);
+ }
+ vie_receiver_.SetNackStatus(false, max_nack_reordering_threshold_);
+ // When NACK is off, allow decoding with errors. Otherwise, the video
+ // will freeze, and will only recover with a complete key frame.
+ vcm_->SetDecodeErrorMode(kWithErrors);
+ }
+}
+
+bool ViEChannel::IsSendingFecEnabled() {
+ bool fec_enabled = false;
+ uint8_t pltype_red = 0;
+ uint8_t pltype_fec = 0;
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->GenericFECStatus(fec_enabled, pltype_red, pltype_fec);
+ if (fec_enabled)
+ return true;
+ }
+ return false;
+}
+
+int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
+ if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
+ LOG(LS_ERROR) << "Invalid send buffer value.";
+ return -1;
+ }
+ if (target_delay_ms == 0) {
+ // Real-time mode.
+ nack_history_size_sender_ = kSendSidePacketHistorySize;
+ } else {
+ nack_history_size_sender_ = GetRequiredNackListSize(target_delay_ms);
+ // Don't allow a number lower than the default value.
+ if (nack_history_size_sender_ < kSendSidePacketHistorySize) {
+ nack_history_size_sender_ = kSendSidePacketHistorySize;
+ }
+ }
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+ return 0;
+}
+
+int ViEChannel::SetReceiverBufferingMode(int target_delay_ms) {
+ if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
+ LOG(LS_ERROR) << "Invalid receive buffer delay value.";
+ return -1;
+ }
+ int max_nack_list_size;
+ int max_incomplete_time_ms;
+ if (target_delay_ms == 0) {
+ // Real-time mode - restore default settings.
+ max_nack_reordering_threshold_ = kMaxPacketAgeToNack;
+ max_nack_list_size = kMaxNackListSize;
+ max_incomplete_time_ms = 0;
+ } else {
+ max_nack_list_size = 3 * GetRequiredNackListSize(target_delay_ms) / 4;
+ max_nack_reordering_threshold_ = max_nack_list_size;
+ // Calculate the max incomplete time and round to int.
+ max_incomplete_time_ms = static_cast<int>(kMaxIncompleteTimeMultiplier *
+ target_delay_ms + 0.5f);
+ }
+ vcm_->SetNackSettings(max_nack_list_size, max_nack_reordering_threshold_,
+ max_incomplete_time_ms);
+ vcm_->SetMinReceiverDelay(target_delay_ms);
+ if (vie_sync_.SetTargetBufferingDelay(target_delay_ms) < 0)
+ return -1;
+ return 0;
+}
+
+int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
+ // The max size of the nack list should be large enough to accommodate the
+  // number of packets (frames) resulting from the increased delay.
+ // Roughly estimating for ~40 packets per frame @ 30fps.
+ return target_delay_ms * 40 * 30 / 1000;
+}
+
+int ViEChannel::SetSendTimestampOffsetStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset);
+ }
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveTimestampOffsetStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendAbsoluteSendTimeStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionAbsoluteSendTime);
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveAbsoluteSendTimeStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendVideoRotationStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionVideoRotation);
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionVideoRotation, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveVideoRotationStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveVideoRotationStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendTransportSequenceNumber(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber);
+ }
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveTransportSequenceNumber(bool enable, int id) {
+ return vie_receiver_.SetReceiveTransportSequenceNumber(enable, id) ? 0 : -1;
+}
+
+void ViEChannel::SetRtcpXrRrtrStatus(bool enable) {
+ rtp_rtcp_modules_[0]->SetRtcpXrRrtrStatus(enable);
+}
+
+void ViEChannel::EnableTMMBR(bool enable) {
+ rtp_rtcp_modules_[0]->SetTMMBRStatus(enable);
+}
+
+int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
+ const StreamType usage,
+ const uint8_t simulcast_idx) {
+ RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[simulcast_idx];
+ if (usage == kViEStreamTypeRtx) {
+ rtp_rtcp->SetRtxSsrc(SSRC);
+ } else {
+ rtp_rtcp->SetSSRC(SSRC);
+ }
+ return 0;
+}
+
+int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
+ const uint32_t SSRC) {
+ vie_receiver_.SetRtxSsrc(SSRC);
+ return 0;
+}
+
+int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
+  RTC_DCHECK_LT(idx, rtp_rtcp_modules_.size());
+ *ssrc = rtp_rtcp_modules_[idx]->SSRC();
+ return 0;
+}
+
+uint32_t ViEChannel::GetRemoteSSRC() {
+ return vie_receiver_.GetRemoteSsrc();
+}
+
+int ViEChannel::SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRtxSendPayloadType(payload_type, associated_payload_type);
+ SetRtxSendStatus(true);
+ return 0;
+}
+
+void ViEChannel::SetRtxSendStatus(bool enable) {
+ int rtx_settings =
+ enable ? kRtxRetransmitted | kRtxRedundantPayloads : kRtxOff;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRtxSendStatus(rtx_settings);
+}
+
+void ViEChannel::SetRtxReceivePayloadType(int payload_type,
+ int associated_payload_type) {
+ vie_receiver_.SetRtxPayloadType(payload_type, associated_payload_type);
+}
+
+void ViEChannel::SetUseRtxPayloadMappingOnRestore(bool val) {
+ vie_receiver_.SetUseRtxPayloadMappingOnRestore(val);
+}
+
+void ViEChannel::SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ if (rtp_rtcp->SetRtpStateForSsrc(ssrc, rtp_state))
+ return;
+ }
+}
+
+RtpState ViEChannel::GetRtpStateForSsrc(uint32_t ssrc) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ RtpState rtp_state;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ if (rtp_rtcp->GetRtpStateForSsrc(ssrc, &rtp_state))
+ return rtp_state;
+ }
+ LOG(LS_ERROR) << "Couldn't get RTP state for ssrc: " << ssrc;
+ return rtp_state;
+}
+
+// TODO(pbos): Set CNAME on all modules.
+int32_t ViEChannel::SetRTCPCName(const char* rtcp_cname) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ return rtp_rtcp_modules_[0]->SetCNAME(rtcp_cname);
+}
+
+int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
+ uint32_t remoteSSRC = vie_receiver_.GetRemoteSsrc();
+ return rtp_rtcp_modules_[0]->RemoteCNAME(remoteSSRC, rtcp_cname);
+}
+
+int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
+ uint32_t* cumulative_lost,
+ uint32_t* extended_max,
+ uint32_t* jitter_samples,
+ int64_t* rtt_ms) {
+ // Aggregate the report blocks associated with streams sent on this channel.
+ std::vector<RTCPReportBlock> report_blocks;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RemoteRTCPStat(&report_blocks);
+
+ if (report_blocks.empty())
+ return -1;
+
+ uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
+ std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
+ for (; it != report_blocks.end(); ++it) {
+ if (it->remoteSSRC == remote_ssrc)
+ break;
+ }
+ if (it == report_blocks.end()) {
+ // We have not received packets with an SSRC matching the report blocks. To
+ // have a chance of calculating an RTT we will try with the SSRC of the
+ // first report block received.
+ // This is very important for send-only channels where we don't know the
+ // SSRC of the other end.
+ remote_ssrc = report_blocks[0].remoteSSRC;
+ }
+
+ // TODO(asapersson): Change report_block_stats to not rely on
+ // GetSendRtcpStatistics to be called.
+ RTCPReportBlock report =
+ report_block_stats_sender_->AggregateAndStore(report_blocks);
+ *fraction_lost = report.fractionLost;
+ *cumulative_lost = report.cumulativeLost;
+ *extended_max = report.extendedHighSeqNum;
+ *jitter_samples = report.jitter;
+
+ int64_t dummy;
+ int64_t rtt = 0;
+ if (rtp_rtcp_modules_[0]->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) !=
+ 0) {
+ return -1;
+ }
+ *rtt_ms = rtt;
+ return 0;
+}
+
+void ViEChannel::RegisterSendChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RegisterRtcpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtcpStatisticsCallback(
+ callback);
+ rtp_rtcp_modules_[0]->RegisterRtcpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterRtcpPacketTypeCounterObserver(
+ RtcpPacketTypeCounterObserver* observer) {
+ rtcp_packet_type_counter_observer_.Set(observer);
+}
+
+void ViEChannel::GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ *rtp_counters = StreamDataCounters();
+ *rtx_counters = StreamDataCounters();
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ StreamDataCounters rtp_data;
+ StreamDataCounters rtx_data;
+ rtp_rtcp->GetSendStreamDataCounters(&rtp_data, &rtx_data);
+ rtp_counters->Add(rtp_data);
+ rtx_counters->Add(rtx_data);
+ }
+}
+
+void ViEChannel::GetReceiveStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ StreamStatistician* statistician = vie_receiver_.GetReceiveStatistics()->
+ GetStatistician(vie_receiver_.GetRemoteSsrc());
+ if (statistician) {
+ statistician->GetReceiveStreamDataCounters(rtp_counters);
+ }
+ uint32_t rtx_ssrc = 0;
+ if (vie_receiver_.GetRtxSsrc(&rtx_ssrc)) {
+ StreamStatistician* statistician =
+ vie_receiver_.GetReceiveStatistics()->GetStatistician(rtx_ssrc);
+ if (statistician) {
+ statistician->GetReceiveStreamDataCounters(rtx_counters);
+ }
+ }
+}
+
+void ViEChannel::RegisterSendChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::GetSendRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
+ rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
+
+ RtcpPacketTypeCounter counter;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ counter.Add(counter_map[rtp_rtcp->SSRC()]);
+ *packet_counter = counter;
+}
+
+void ViEChannel::GetReceiveRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
+ rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
+
+ RtcpPacketTypeCounter counter;
+ counter.Add(counter_map[vie_receiver_.GetRemoteSsrc()]);
+
+ *packet_counter = counter;
+}
+
+void ViEChannel::RegisterSendSideDelayObserver(
+ SendSideDelayObserver* observer) {
+ send_side_delay_observer_.Set(observer);
+}
+
+void ViEChannel::RegisterSendBitrateObserver(
+ BitrateStatisticsObserver* observer) {
+ send_bitrate_observer_.Set(observer);
+}
+
+int32_t ViEChannel::StartSend() {
+ CriticalSectionScoped cs(crit_.get());
+
+ if (rtp_rtcp_modules_[0]->Sending())
+ return -1;
+
+ for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i) {
+ RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[i];
+ rtp_rtcp->SetSendingMediaStatus(true);
+ rtp_rtcp->SetSendingStatus(true);
+ }
+ send_payload_router_->set_active(true);
+ return 0;
+}
+
+int32_t ViEChannel::StopSend() {
+ send_payload_router_->set_active(false);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetSendingMediaStatus(false);
+
+ if (!rtp_rtcp_modules_[0]->Sending()) {
+ return -1;
+ }
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetSendingStatus(false);
+ }
+ return 0;
+}
+
// Returns true if the primary RTP module is currently sending.
bool ViEChannel::Sending() {
  return rtp_rtcp_modules_[0]->Sending();
}

// Starts reception. For receive-only channels the decode thread is spun up
// first so frames can be consumed as soon as packets arrive.
void ViEChannel::StartReceive() {
  if (!sender_)
    StartDecodeThread();
  vie_receiver_.StartReceive();
}

// Stops reception. For receive-only channels the decode thread is stopped and
// the decoder is reset so a subsequent StartReceive() starts from a clean
// state.
void ViEChannel::StopReceive() {
  vie_receiver_.StopReceive();
  if (!sender_) {
    StopDecodeThread();
    vcm_->ResetDecoder();
  }
}
+
// Delivers an incoming RTP packet to the receiver. |packet_time| carries the
// arrival timestamp. Returns the receiver's status code.
int32_t ViEChannel::ReceivedRTPPacket(const void* rtp_packet,
                                      size_t rtp_packet_length,
                                      const PacketTime& packet_time) {
  return vie_receiver_.ReceivedRTPPacket(
      rtp_packet, rtp_packet_length, packet_time);
}

// Delivers an incoming RTCP packet to the receiver.
int32_t ViEChannel::ReceivedRTCPPacket(const void* rtcp_packet,
                                       size_t rtcp_packet_length) {
  return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
}

// Sets the maximum transfer unit (including IP/UDP/RTP headers) on every RTP
// module. Always returns 0.
int32_t ViEChannel::SetMTU(uint16_t mtu) {
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
    rtp_rtcp->SetMaxTransferUnit(mtu);
  return 0;
}
+
// Returns the primary RTP/RTCP module (not owned by the caller).
RtpRtcp* ViEChannel::rtp_rtcp() {
  return rtp_rtcp_modules_[0];
}

// Returns the payload router shared with the encoder side.
rtc::scoped_refptr<PayloadRouter> ViEChannel::send_payload_router() {
  return send_payload_router_;
}

// Returns the protection callback used by the VCM for FEC/NACK decisions.
VCMProtectionCallback* ViEChannel::vcm_protection_callback() {
  return vcm_protection_callback_.get();
}

// Returns the observer that forwards call statistics (RTT) to this channel.
CallStatsObserver* ViEChannel::GetStatsObserver() {
  return stats_observer_.get();
}
+
// Do not acquire the lock of |vcm_| in this function. Decode callback won't
// necessarily be called from the decoding thread. The decoding thread may have
// held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
// the same lock in the path of decode callback can deadlock.
//
// Implements VCMReceiveCallback: hands a decoded frame to the optional
// pre-render callback and then to the incoming video stream for rendering.
// Always returns 0.
int32_t ViEChannel::FrameToRender(VideoFrame& video_frame) {  // NOLINT
  CriticalSectionScoped cs(crit_.get());

  if (pre_render_callback_ != NULL)
    pre_render_callback_->FrameCallback(&video_frame);

  // TODO(pbos): Remove stream id argument.
  incoming_video_stream_->RenderFrame(0xFFFFFFFF, video_frame);
  return 0;
}
+
// Implements VCMReceiveCallback: reports a decoded reference frame upstream
// via RTCP RPSI on the primary module.
int32_t ViEChannel::ReceivedDecodedReferenceFrame(
    const uint64_t picture_id) {
  return rtp_rtcp_modules_[0]->SendRTCPReferencePictureSelection(picture_id);
}

// Implements VCMReceiveCallback: forwards the incoming payload type to the
// receive statistics proxy, if one is registered.
void ViEChannel::OnIncomingPayloadType(int payload_type) {
  CriticalSectionScoped cs(crit_.get());
  if (receive_stats_callback_)
    receive_stats_callback_->OnIncomingPayloadType(payload_type);
}

// Implements VCMReceiveStatisticsCallback. Note the argument order flips:
// the proxy expects (frame_rate, bit_rate).
void ViEChannel::OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) {
  CriticalSectionScoped cs(crit_.get());
  if (receive_stats_callback_)
    receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
}

// Implements VCMReceiveStatisticsCallback.
void ViEChannel::OnDiscardedPacketsUpdated(int discarded_packets) {
  CriticalSectionScoped cs(crit_.get());
  if (receive_stats_callback_)
    receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
}

// Implements VCMReceiveStatisticsCallback. The counts are cached locally
// (|receive_frame_counts_|) in addition to being forwarded.
void ViEChannel::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
  CriticalSectionScoped cs(crit_.get());
  receive_frame_counts_ = frame_counts;
  if (receive_stats_callback_)
    receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
}
+
// Implements VCMDecoderTimingCallback: forwards decoder timing to the stats
// proxy, appending the most recent RTT (|last_rtt_ms_|, updated in
// OnRttUpdate under the same lock).
void ViEChannel::OnDecoderTiming(int decode_ms,
                                 int max_decode_ms,
                                 int current_delay_ms,
                                 int target_delay_ms,
                                 int jitter_buffer_ms,
                                 int min_playout_delay_ms,
                                 int render_delay_ms) {
  CriticalSectionScoped cs(crit_.get());
  if (!receive_stats_callback_)
    return;
  receive_stats_callback_->OnDecoderTiming(
      decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
      jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt_ms_);
}
+
// Implements VCMFrameTypeCallback: requests a key frame via RTCP on the
// primary module.
int32_t ViEChannel::RequestKeyFrame() {
  return rtp_rtcp_modules_[0]->RequestKeyFrame();
}

// Implements VCMFrameTypeCallback: sends an RTCP Slice Loss Indication.
// The 64-bit |picture_id| is truncated to 8 bits here — presumably only the
// low bits matter for the on-wire SLI picture id (6 bits per RFC 4585);
// TODO confirm.
int32_t ViEChannel::SliceLossIndicationRequest(
    const uint64_t picture_id) {
  return rtp_rtcp_modules_[0]->SendRTCPSliceLossIndication(
      static_cast<uint8_t>(picture_id));
}

// Implements VCMPacketRequestCallback: NACKs |length| sequence numbers on the
// primary module.
int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
                                  uint16_t length) {
  return rtp_rtcp_modules_[0]->SendNACK(sequence_numbers, length);
}
+
// Static trampoline for the decode thread; |obj| is the owning ViEChannel.
bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
  return static_cast<ViEChannel*>(obj)->ChannelDecodeProcess();
}

// One decode-loop iteration: blocks in the VCM for up to kMaxDecodeWaitTimeMs
// waiting for a frame. Returns true so the thread wrapper keeps looping.
bool ViEChannel::ChannelDecodeProcess() {
  vcm_->Decode(kMaxDecodeWaitTimeMs);
  return true;
}
+
// Called by the ChannelStatsObserver with new RTT estimates. The VCM is
// updated *before* taking |crit_| (the VCM has its own locking); the RTT
// bookkeeping for histograms/stats is then updated under the lock.
void ViEChannel::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
  vcm_->SetReceiveChannelParameters(max_rtt_ms);

  CriticalSectionScoped cs(crit_.get());
  if (time_of_first_rtt_ms_ == -1)
    time_of_first_rtt_ms_ = Clock::GetRealTimeClock()->TimeInMilliseconds();
  rtt_sum_ms_ += avg_rtt_ms;
  last_rtt_ms_ = avg_rtt_ms;
  ++num_rtts_;
}
+
+int ViEChannel::ProtectionRequest(const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* video_rate_bps,
+ uint32_t* nack_rate_bps,
+ uint32_t* fec_rate_bps) {
+ *video_rate_bps = 0;
+ *nack_rate_bps = 0;
+ *fec_rate_bps = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ uint32_t not_used = 0;
+ uint32_t module_video_rate = 0;
+ uint32_t module_fec_rate = 0;
+ uint32_t module_nack_rate = 0;
+ rtp_rtcp->SetFecParameters(delta_fec_params, key_fec_params);
+ rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
+ &module_nack_rate);
+ *video_rate_bps += module_video_rate;
+ *nack_rate_bps += module_nack_rate;
+ *fec_rate_bps += module_fec_rate;
+ }
+ return 0;
+}
+
+std::vector<RtpRtcp*> ViEChannel::CreateRtpRtcpModules(
+ bool receiver_only,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpIntraFrameObserver* intra_frame_callback,
+ RtcpBandwidthObserver* bandwidth_callback,
+ TransportFeedbackObserver* transport_feedback_callback,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator,
+ BitrateStatisticsObserver* send_bitrate_observer,
+ FrameCountObserver* send_frame_count_observer,
+ SendSideDelayObserver* send_side_delay_observer,
+ size_t num_modules) {
+ RTC_DCHECK_GT(num_modules, 0u);
+ RtpRtcp::Configuration configuration;
+ ReceiveStatistics* null_receive_statistics = configuration.receive_statistics;
+ configuration.audio = false;
+ configuration.receiver_only = receiver_only;
+ configuration.receive_statistics = receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.intra_frame_callback = intra_frame_callback;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer =
+ rtcp_packet_type_counter_observer;
+ configuration.paced_sender = paced_sender;
+ configuration.transport_sequence_number_allocator =
+ transport_sequence_number_allocator;
+ configuration.send_bitrate_observer = send_bitrate_observer;
+ configuration.send_frame_count_observer = send_frame_count_observer;
+ configuration.send_side_delay_observer = send_side_delay_observer;
+ configuration.bandwidth_callback = bandwidth_callback;
+ configuration.transport_feedback_callback = transport_feedback_callback;
+
+ std::vector<RtpRtcp*> modules;
+ for (size_t i = 0; i < num_modules; ++i) {
+ RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+ modules.push_back(rtp_rtcp);
+ // Receive statistics and remote bitrate estimator should only be set for
+ // the primary (first) module.
+ configuration.receive_statistics = null_receive_statistics;
+ configuration.remote_bitrate_estimator = nullptr;
+ }
+ return modules;
+}
+
// Starts the decode thread at highest priority. No-op if it is already
// running. Only valid for receive-only channels (enforced by the DCHECK).
void ViEChannel::StartDecodeThread() {
  RTC_DCHECK(!sender_);
  // Start the decode thread
  if (decode_thread_)
    return;
  decode_thread_ = ThreadWrapper::CreateThread(ChannelDecodeThreadFunction,
                                               this, "DecodingThread");
  decode_thread_->Start();
  decode_thread_->SetPriority(kHighestPriority);
}

// Stops and destroys the decode thread. TriggerDecoderShutdown() is called
// first so a Decode() call blocked inside the VCM wakes up and the thread can
// actually exit.
void ViEChannel::StopDecodeThread() {
  if (!decode_thread_)
    return;

  vcm_->TriggerDecoderShutdown();

  decode_thread_->Stop();
  decode_thread_.reset();
}
+
// Links this video channel to voice channel |ve_channel_id| for audio/video
// synchronization.
int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
                                    VoEVideoSync* ve_sync_interface) {
  return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
                                 rtp_rtcp_modules_[0],
                                 vie_receiver_.GetRtpReceiver());
}

// Returns the id of the voice channel currently used for A/V sync.
int32_t ViEChannel::VoiceChannel() {
  return vie_sync_.VoiceChannel();
}

// Registers a callback invoked on each decoded frame before rendering
// (see FrameToRender). Pass NULL to clear.
void ViEChannel::RegisterPreRenderCallback(
    I420FrameCallback* pre_render_callback) {
  CriticalSectionScoped cs(crit_.get());
  pre_render_callback_ = pre_render_callback;
}

// Registers a callback invoked on encoded images before decoding; forwarded
// straight to the VCM.
void ViEChannel::RegisterPreDecodeImageCallback(
    EncodedImageCallback* pre_decode_callback) {
  vcm_->RegisterPreDecodeImageCallback(pre_decode_callback);
}
+
// TODO(pbos): Remove OnInitializeDecoder which is called from the RTP module,
// any decoder resetting should be handled internally within the VCM.
//
// Implements RtpFeedback. |frequency|, |channels| and |rate| are part of the
// audio-oriented RtpFeedback interface and are ignored here; the decoder is
// simply reset. Always returns 0.
int32_t ViEChannel::OnInitializeDecoder(
    const int8_t payload_type,
    const char payload_name[RTP_PAYLOAD_NAME_SIZE],
    const int frequency,
    const uint8_t channels,
    const uint32_t rate) {
  LOG(LS_INFO) << "OnInitializeDecoder " << static_cast<int>(payload_type)
               << " " << payload_name;
  vcm_->ResetDecoder();

  return 0;
}

// Implements RtpFeedback: records the new remote SSRC on the primary module.
void ViEChannel::OnIncomingSSRCChanged(const uint32_t ssrc) {
  rtp_rtcp_modules_[0]->SetRemoteSSRC(ssrc);
}

// Implements RtpFeedback. CSRC changes are intentionally ignored.
void ViEChannel::OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) {}
+
// Registers |observer| for sent frame count updates.
void ViEChannel::RegisterSendFrameCountObserver(
    FrameCountObserver* observer) {
  send_frame_count_observer_.Set(observer);
}

// Sets the proxy that receives all VCM receive-side statistics callbacks.
// Not owned; pass NULL to clear.
void ViEChannel::RegisterReceiveStatisticsProxy(
    ReceiveStatisticsProxy* receive_statistics_proxy) {
  CriticalSectionScoped cs(crit_.get());
  receive_stats_callback_ = receive_statistics_proxy;
}

// Sets the stream that decoded frames are handed to in FrameToRender.
void ViEChannel::SetIncomingVideoStream(
    IncomingVideoStream* incoming_video_stream) {
  CriticalSectionScoped cs(crit_.get());
  incoming_video_stream_ = incoming_video_stream;
}
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_channel.h b/webrtc/video_engine/vie_channel.h
new file mode 100644
index 0000000000..41c679aafa
--- /dev/null
+++ b/webrtc/video_engine/vie_channel.h
@@ -0,0 +1,458 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
+
#include <list>
#include <map>
#include <vector>

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_receiver.h"
#include "webrtc/video_engine/vie_sync_module.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class ChannelStatsObserver;
+class Config;
+class CriticalSectionWrapper;
+class EncodedImageCallback;
+class I420FrameCallback;
+class IncomingVideoStream;
+class PacedSender;
+class PacketRouter;
+class PayloadRouter;
+class ProcessThread;
+class ReceiveStatisticsProxy;
+class ReportBlockStats;
+class RtcpRttStats;
+class ThreadWrapper;
+class ViEChannelProtectionCallback;
+class ViERTPObserver;
+class VideoCodingModule;
+class VideoDecoder;
+class VideoRenderCallback;
+class VoEVideoSync;
+
// Identifies whether an SSRC configured on the channel carries primary media
// or RTX retransmissions.
enum StreamType {
  kViEStreamTypeNormal = 0,  // Normal media stream
  kViEStreamTypeRtx = 1      // Retransmission media stream
};
+
// Binds together the send and receive RTP/RTCP modules, the video coding
// module, the receiver and the A/V sync module for a single video channel.
// Implements the VCM callback interfaces (frame type requests, decoded-frame
// delivery, receive statistics, decoder timing, packet requests) and
// RtpFeedback from the RTP module.
class ViEChannel : public VCMFrameTypeCallback,
                   public VCMReceiveCallback,
                   public VCMReceiveStatisticsCallback,
                   public VCMDecoderTimingCallback,
                   public VCMPacketRequestCallback,
                   public RtpFeedback {
 public:
  friend class ChannelStatsObserver;
  friend class ViEChannelProtectionCallback;

  ViEChannel(uint32_t number_of_cores,
             Transport* transport,
             ProcessThread* module_process_thread,
             RtcpIntraFrameObserver* intra_frame_observer,
             RtcpBandwidthObserver* bandwidth_observer,
             TransportFeedbackObserver* transport_feedback_observer,
             RemoteBitrateEstimator* remote_bitrate_estimator,
             RtcpRttStats* rtt_stats,
             PacedSender* paced_sender,
             PacketRouter* packet_router,
             size_t max_rtp_streams,
             bool sender);
  ~ViEChannel();

  int32_t Init();

  // Sets the encoder to use for the channel. |new_stream| indicates the encoder
  // type has changed and we should start a new RTP stream.
  int32_t SetSendCodec(const VideoCodec& video_codec, bool new_stream = true);
  int32_t SetReceiveCodec(const VideoCodec& video_codec);
  // Registers an external decoder. |buffered_rendering| means that the decoder
  // will render frames after decoding according to the render timestamp
  // provided by the video coding module. |render_delay| indicates the time
  // needed to decode and render a frame.
  int32_t RegisterExternalDecoder(const uint8_t pl_type,
                                  VideoDecoder* decoder,
                                  bool buffered_rendering,
                                  int32_t render_delay);
  int32_t DeRegisterExternalDecoder(const uint8_t pl_type);
  int32_t ReceiveCodecStatistics(uint32_t* num_key_frames,
                                 uint32_t* num_delta_frames);
  uint32_t DiscardedPackets() const;

  // Returns the estimated delay in milliseconds.
  int ReceiveDelay() const;

  void SetRTCPMode(const RtcpMode rtcp_mode);
  void SetProtectionMode(bool enable_nack,
                         bool enable_fec,
                         int payload_type_red,
                         int payload_type_fec);
  bool IsSendingFecEnabled();
  int SetSenderBufferingMode(int target_delay_ms);
  int SetReceiverBufferingMode(int target_delay_ms);
  // RTP header extension toggles; |id| is the extension's RTP header id.
  int SetSendTimestampOffsetStatus(bool enable, int id);
  int SetReceiveTimestampOffsetStatus(bool enable, int id);
  int SetSendAbsoluteSendTimeStatus(bool enable, int id);
  int SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
  int SetSendVideoRotationStatus(bool enable, int id);
  int SetReceiveVideoRotationStatus(bool enable, int id);
  int SetSendTransportSequenceNumber(bool enable, int id);
  int SetReceiveTransportSequenceNumber(bool enable, int id);
  void SetRtcpXrRrtrStatus(bool enable);
  void EnableTMMBR(bool enable);

  // Sets SSRC for outgoing stream.
  int32_t SetSSRC(const uint32_t SSRC,
                  const StreamType usage,
                  const unsigned char simulcast_idx);

  // Gets SSRC for outgoing stream number |idx|.
  int32_t GetLocalSSRC(uint8_t idx, unsigned int* ssrc);

  // Gets SSRC for the incoming stream.
  uint32_t GetRemoteSSRC();

  int SetRtxSendPayloadType(int payload_type, int associated_payload_type);
  void SetRtxReceivePayloadType(int payload_type, int associated_payload_type);
  // If set to true, the RTX payload type mapping supplied in
  // |SetRtxReceivePayloadType| will be used when restoring RTX packets. Without
  // it, RTX packets will always be restored to the last non-RTX packet payload
  // type received.
  void SetUseRtxPayloadMappingOnRestore(bool val);

  void SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state);
  RtpState GetRtpStateForSsrc(uint32_t ssrc);

  // Sets the CName for the outgoing stream on the channel.
  int32_t SetRTCPCName(const char* rtcp_cname);

  // Gets the CName of the incoming stream.
  int32_t GetRemoteRTCPCName(char rtcp_cname[]);

  // Returns statistics reported by the remote client in an RTCP packet.
  // TODO(pbos): Remove this along with VideoSendStream::GetRtt().
  int32_t GetSendRtcpStatistics(uint16_t* fraction_lost,
                                uint32_t* cumulative_lost,
                                uint32_t* extended_max,
                                uint32_t* jitter_samples,
                                int64_t* rtt_ms);

  // Called on receipt of RTCP report block from remote side.
  void RegisterSendChannelRtcpStatisticsCallback(
      RtcpStatisticsCallback* callback);

  // Called on generation of RTCP stats
  void RegisterReceiveChannelRtcpStatisticsCallback(
      RtcpStatisticsCallback* callback);

  // Gets send statistics for the rtp and rtx stream.
  void GetSendStreamDataCounters(StreamDataCounters* rtp_counters,
                                 StreamDataCounters* rtx_counters) const;

  // Gets received stream data counters.
  void GetReceiveStreamDataCounters(StreamDataCounters* rtp_counters,
                                    StreamDataCounters* rtx_counters) const;

  // Called on update of RTP statistics.
  void RegisterSendChannelRtpStatisticsCallback(
      StreamDataCountersCallback* callback);

  // Called on update of RTP statistics.
  void RegisterReceiveChannelRtpStatisticsCallback(
      StreamDataCountersCallback* callback);

  // Sums RTCP packet-type counters over all send modules.
  void GetSendRtcpPacketTypeCounter(
      RtcpPacketTypeCounter* packet_counter) const;

  // RTCP packet-type counters for the remote (incoming) SSRC.
  void GetReceiveRtcpPacketTypeCounter(
      RtcpPacketTypeCounter* packet_counter) const;

  void RegisterSendSideDelayObserver(SendSideDelayObserver* observer);

  // Called on any new send bitrate estimate.
  void RegisterSendBitrateObserver(BitrateStatisticsObserver* observer);

  // Implements RtpFeedback.
  int32_t OnInitializeDecoder(const int8_t payload_type,
                              const char payload_name[RTP_PAYLOAD_NAME_SIZE],
                              const int frequency,
                              const uint8_t channels,
                              const uint32_t rate) override;
  void OnIncomingSSRCChanged(const uint32_t ssrc) override;
  void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override;

  int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC);

  int32_t StartSend();
  int32_t StopSend();
  bool Sending();
  void StartReceive();
  void StopReceive();

  int32_t ReceivedRTPPacket(const void* rtp_packet,
                            const size_t rtp_packet_length,
                            const PacketTime& packet_time);
  int32_t ReceivedRTCPPacket(const void* rtcp_packet,
                             const size_t rtcp_packet_length);

  // Sets the maximum transfer unit size for the network link, i.e. including
  // IP, UDP and RTP headers.
  int32_t SetMTU(uint16_t mtu);

  // Gets the modules used by the channel.
  RtpRtcp* rtp_rtcp();
  rtc::scoped_refptr<PayloadRouter> send_payload_router();
  VCMProtectionCallback* vcm_protection_callback();

  CallStatsObserver* GetStatsObserver();

  // Implements VCMReceiveCallback.
  virtual int32_t FrameToRender(VideoFrame& video_frame);  // NOLINT

  // Implements VCMReceiveCallback.
  virtual int32_t ReceivedDecodedReferenceFrame(
      const uint64_t picture_id);

  // Implements VCMReceiveCallback.
  void OnIncomingPayloadType(int payload_type) override;

  // Implements VCMReceiveStatisticsCallback.
  void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
  void OnDiscardedPacketsUpdated(int discarded_packets) override;
  void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;

  // Implements VCMDecoderTimingCallback.
  virtual void OnDecoderTiming(int decode_ms,
                               int max_decode_ms,
                               int current_delay_ms,
                               int target_delay_ms,
                               int jitter_buffer_ms,
                               int min_playout_delay_ms,
                               int render_delay_ms);

  // Implements FrameTypeCallback.
  virtual int32_t RequestKeyFrame();

  // Implements FrameTypeCallback.
  virtual int32_t SliceLossIndicationRequest(
      const uint64_t picture_id);

  // Implements VideoPacketRequestCallback.
  int32_t ResendPackets(const uint16_t* sequence_numbers,
                        uint16_t length) override;

  int32_t SetVoiceChannel(int32_t ve_channel_id,
                          VoEVideoSync* ve_sync_interface);
  int32_t VoiceChannel();

  // New-style callbacks, used by VideoReceiveStream.
  void RegisterPreRenderCallback(I420FrameCallback* pre_render_callback);
  void RegisterPreDecodeImageCallback(
      EncodedImageCallback* pre_decode_callback);

  void RegisterSendFrameCountObserver(FrameCountObserver* observer);
  void RegisterRtcpPacketTypeCounterObserver(
      RtcpPacketTypeCounterObserver* observer);
  void RegisterReceiveStatisticsProxy(
      ReceiveStatisticsProxy* receive_statistics_proxy);
  void SetIncomingVideoStream(IncomingVideoStream* incoming_video_stream);

 protected:
  static bool ChannelDecodeThreadFunction(void* obj);
  bool ChannelDecodeProcess();

  // Invoked (via ChannelStatsObserver) with new RTT estimates.
  void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms);

  // Invoked (via ViEChannelProtectionCallback) to apply FEC parameters and
  // report current send rates.
  int ProtectionRequest(const FecProtectionParams* delta_fec_params,
                        const FecProtectionParams* key_fec_params,
                        uint32_t* sent_video_rate_bps,
                        uint32_t* sent_nack_rate_bps,
                        uint32_t* sent_fec_rate_bps);

 private:
  static std::vector<RtpRtcp*> CreateRtpRtcpModules(
      bool receiver_only,
      ReceiveStatistics* receive_statistics,
      Transport* outgoing_transport,
      RtcpIntraFrameObserver* intra_frame_callback,
      RtcpBandwidthObserver* bandwidth_callback,
      TransportFeedbackObserver* transport_feedback_callback,
      RtcpRttStats* rtt_stats,
      RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
      RemoteBitrateEstimator* remote_bitrate_estimator,
      RtpPacketSender* paced_sender,
      TransportSequenceNumberAllocator* transport_sequence_number_allocator,
      BitrateStatisticsObserver* send_bitrate_observer,
      FrameCountObserver* send_frame_count_observer,
      SendSideDelayObserver* send_side_delay_observer,
      size_t num_modules);

  // Assumed to be protected.
  void StartDecodeThread();
  void StopDecodeThread();

  void ProcessNACKRequest(const bool enable);
  // Compute NACK list parameters for the buffering mode.
  int GetRequiredNackListSize(int target_delay_ms);
  void SetRtxSendStatus(bool enable);

  void UpdateHistograms();

  // ViEChannel exposes methods that allow to modify observers and callbacks
  // to be modified. Such an API-style is cumbersome to implement and maintain
  // at all the levels when comparing to only setting them at construction. As
  // so this class instantiates its children with a wrapper that can be modified
  // at a later time.
  template <class T>
  class RegisterableCallback : public T {
   public:
    RegisterableCallback()
        : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
          callback_(NULL) {}

    void Set(T* callback) {
      CriticalSectionScoped cs(critsect_.get());
      callback_ = callback;
    }

   protected:
    // Note: this should be implemented with a RW-lock to allow simultaneous
    // calls into the callback. However that doesn't seem to be needed for the
    // current type of callbacks covered by this class.
    rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
    T* callback_ GUARDED_BY(critsect_);

   private:
    RTC_DISALLOW_COPY_AND_ASSIGN(RegisterableCallback);
  };

  class RegisterableBitrateStatisticsObserver:
      public RegisterableCallback<BitrateStatisticsObserver> {
    virtual void Notify(const BitrateStatistics& total_stats,
                        const BitrateStatistics& retransmit_stats,
                        uint32_t ssrc) {
      CriticalSectionScoped cs(critsect_.get());
      if (callback_)
        callback_->Notify(total_stats, retransmit_stats, ssrc);
    }
  } send_bitrate_observer_;

  class RegisterableFrameCountObserver
      : public RegisterableCallback<FrameCountObserver> {
   public:
    virtual void FrameCountUpdated(const FrameCounts& frame_counts,
                                   uint32_t ssrc) {
      CriticalSectionScoped cs(critsect_.get());
      if (callback_)
        callback_->FrameCountUpdated(frame_counts, ssrc);
    }

   private:
  } send_frame_count_observer_;

  class RegisterableSendSideDelayObserver :
      public RegisterableCallback<SendSideDelayObserver> {
    void SendSideDelayUpdated(int avg_delay_ms,
                              int max_delay_ms,
                              uint32_t ssrc) override {
      CriticalSectionScoped cs(critsect_.get());
      if (callback_)
        callback_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc);
    }
  } send_side_delay_observer_;

  // Forwards RTCP packet-type counter updates and additionally caches the
  // latest counter per SSRC so Get*RtcpPacketTypeCounter() can aggregate.
  class RegisterableRtcpPacketTypeCounterObserver
      : public RegisterableCallback<RtcpPacketTypeCounterObserver> {
   public:
    void RtcpPacketTypesCounterUpdated(
        uint32_t ssrc,
        const RtcpPacketTypeCounter& packet_counter) override {
      CriticalSectionScoped cs(critsect_.get());
      if (callback_)
        callback_->RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
      counter_map_[ssrc] = packet_counter;
    }

    virtual std::map<uint32_t, RtcpPacketTypeCounter> GetPacketTypeCounterMap()
        const {
      CriticalSectionScoped cs(critsect_.get());
      return counter_map_;
    }

   private:
    std::map<uint32_t, RtcpPacketTypeCounter> counter_map_
        GUARDED_BY(critsect_);
  } rtcp_packet_type_counter_observer_;

  const uint32_t number_of_cores_;
  const bool sender_;

  ProcessThread* const module_process_thread_;

  // Used for all registered callbacks except rendering.
  rtc::scoped_ptr<CriticalSectionWrapper> crit_;

  // Owned modules/classes.
  rtc::scoped_refptr<PayloadRouter> send_payload_router_;
  rtc::scoped_ptr<ViEChannelProtectionCallback> vcm_protection_callback_;

  VideoCodingModule* const vcm_;
  ViEReceiver vie_receiver_;
  ViESyncModule vie_sync_;

  // Helper to report call statistics.
  rtc::scoped_ptr<ChannelStatsObserver> stats_observer_;

  // Not owned.
  ReceiveStatisticsProxy* receive_stats_callback_ GUARDED_BY(crit_);
  FrameCounts receive_frame_counts_ GUARDED_BY(crit_);
  IncomingVideoStream* incoming_video_stream_ GUARDED_BY(crit_);
  RtcpIntraFrameObserver* const intra_frame_observer_;
  RtcpRttStats* const rtt_stats_;
  PacedSender* const paced_sender_;
  PacketRouter* const packet_router_;

  const rtc::scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
  TransportFeedbackObserver* const transport_feedback_observer_;

  rtc::scoped_ptr<ThreadWrapper> decode_thread_;

  int nack_history_size_sender_;
  int max_nack_reordering_threshold_;
  I420FrameCallback* pre_render_callback_ GUARDED_BY(crit_);

  const rtc::scoped_ptr<ReportBlockStats> report_block_stats_sender_;

  // RTT bookkeeping for histograms; see OnRttUpdate().
  int64_t time_of_first_rtt_ms_ GUARDED_BY(crit_);
  int64_t rtt_sum_ms_ GUARDED_BY(crit_);
  int64_t last_rtt_ms_ GUARDED_BY(crit_);
  size_t num_rtts_ GUARDED_BY(crit_);

  // RtpRtcp modules, declared last as they use other members on construction.
  const std::vector<RtpRtcp*> rtp_rtcp_modules_;
  size_t num_active_rtp_rtcp_modules_ GUARDED_BY(crit_);
};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
diff --git a/webrtc/video_engine/vie_codec_unittest.cc b/webrtc/video_engine/vie_codec_unittest.cc
new file mode 100644
index 0000000000..9f648ec521
--- /dev/null
+++ b/webrtc/video_engine/vie_codec_unittest.cc
@@ -0,0 +1,230 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
// Builds VP8 codec with 0 simulcast streams.
// Fills |video_codec| with a complete, fixed VP8 configuration used as the
// baseline for the comparison tests below.
void BuildVP8Codec(webrtc::VideoCodec* video_codec) {
  video_codec->codecType = kVideoCodecVP8;
  strncpy(video_codec->plName, "VP8", 4);
  video_codec->plType = 100;
  video_codec->width = 1280;
  video_codec->height = 720;

  video_codec->startBitrate = 1000;  // kbps
  video_codec->maxBitrate = 2000;  // kbps
  video_codec->minBitrate = 1000;  // kbps
  video_codec->maxFramerate = 30;

  video_codec->qpMax = 50;
  video_codec->numberOfSimulcastStreams = 0;
  video_codec->mode = kRealtimeVideo;

  // Set VP8 codec specific info.
  video_codec->codecSpecific.VP8.pictureLossIndicationOn = true;
  video_codec->codecSpecific.VP8.feedbackModeOn = true;
  video_codec->codecSpecific.VP8.complexity = kComplexityNormal;
  video_codec->codecSpecific.VP8.resilience = kResilienceOff;
  video_codec->codecSpecific.VP8.numberOfTemporalLayers = 0;
  video_codec->codecSpecific.VP8.denoisingOn = true;
  video_codec->codecSpecific.VP8.errorConcealmentOn = true;
  video_codec->codecSpecific.VP8.automaticResizeOn = true;
  video_codec->codecSpecific.VP8.frameDroppingOn = true;
  video_codec->codecSpecific.VP8.keyFrameInterval = 200;
}
+
+
+void SetSimulcastSettings(webrtc::VideoCodec* video_codec) {
+ // Simulcast settings.
+ video_codec->numberOfSimulcastStreams = 1;
+ video_codec->simulcastStream[0].width = 320;
+ video_codec->simulcastStream[0].height = 180;
+ video_codec->simulcastStream[0].numberOfTemporalLayers = 0;
+ video_codec->simulcastStream[0].maxBitrate = 100;
+ video_codec->simulcastStream[0].targetBitrate = 100;
+ video_codec->simulcastStream[0].minBitrate = 0;
+ video_codec->simulcastStream[0].qpMax = video_codec->qpMax;
+}
+
+
+// This test compares two VideoCodecInst objects except codec specific and
+// simulcast streams.
+TEST(ViECodecTest, TestCompareCodecs) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+
+ EXPECT_TRUE(codec1 == codec2);
+ EXPECT_FALSE(codec1 != codec2);
+
+ // plname is case insensitive.
+ strncpy(codec2.plName, "vp8", 4);
+ EXPECT_TRUE(codec1 == codec2);
+
+ codec2.codecType = kVideoCodecUnknown;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify pltype.
+ BuildVP8Codec(&codec2);
+ codec2.plType = 101;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modifing height and width.
+ BuildVP8Codec(&codec2);
+ codec2.width = 640;
+ codec2.height = 480;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify framerate, default value is 30.
+ BuildVP8Codec(&codec2);
+ codec2.maxFramerate = 15;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modifying startBitrate, default value is 1000 kbps.
+ BuildVP8Codec(&codec2);
+ codec2.startBitrate = 2000;
+ EXPECT_FALSE(codec1 == codec2);
+ // maxBitrate
+ BuildVP8Codec(&codec2);
+ codec2.startBitrate = 3000;
+ EXPECT_FALSE(codec1 == codec2);
+ // minBirate
+ BuildVP8Codec(&codec2);
+ codec2.startBitrate = 500;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify qpMax.
+ BuildVP8Codec(&codec2);
+ codec2.qpMax = 100;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify mode
+ BuildVP8Codec(&codec2);
+ codec2.mode = kScreensharing;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
// Test VP8 specific comparison.
TEST(ViECodecTest, TestCompareVP8CodecSpecific) {
  VideoCodec codec1, codec2;
  memset(&codec1, 0, sizeof(VideoCodec));
  memset(&codec2, 0, sizeof(VideoCodec));

  BuildVP8Codec(&codec1);
  BuildVP8Codec(&codec2);
  EXPECT_TRUE(codec1 == codec2);

  // pictureLossIndicationOn
  codec2.codecSpecific.VP8.pictureLossIndicationOn = false;
  EXPECT_FALSE(codec1 == codec2);

  // feedbackModeOn
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.feedbackModeOn = false;
  EXPECT_FALSE(codec1 == codec2);

  // complexity
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.complexity = kComplexityHigh;
  EXPECT_FALSE(codec1 == codec2);

  // resilience
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.resilience = kResilientStream;
  EXPECT_FALSE(codec1 == codec2);

  // numberOfTemporalLayers
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.numberOfTemporalLayers = 2;
  EXPECT_FALSE(codec1 == codec2);

  // denoisingOn
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.denoisingOn = false;
  EXPECT_FALSE(codec1 == codec2);

  // errorConcealmentOn
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.errorConcealmentOn = false;
  EXPECT_FALSE(codec1 == codec2);

  // automaticResizeOn (comment previously mislabelled this case as
  // pictureLossIndicationOn).
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.automaticResizeOn = false;
  EXPECT_FALSE(codec1 == codec2);

  // frameDroppingOn
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.frameDroppingOn = false;
  EXPECT_FALSE(codec1 == codec2);

  // keyFrameInterval
  BuildVP8Codec(&codec2);
  codec2.codecSpecific.VP8.keyFrameInterval = 100;
  EXPECT_FALSE(codec1 == codec2);
}
+
// This test compares simulcast stream information in VideoCodec.
TEST(ViECodecTest, TestCompareSimulcastStreams) {
  VideoCodec codec1, codec2;
  memset(&codec1, 0, sizeof(VideoCodec));
  memset(&codec2, 0, sizeof(VideoCodec));

  BuildVP8Codec(&codec1);
  BuildVP8Codec(&codec2);
  // Set simulcast settings.
  SetSimulcastSettings(&codec1);
  SetSimulcastSettings(&codec2);
  EXPECT_TRUE(codec1 == codec2);

  // Modify number of streams.
  codec2.numberOfSimulcastStreams = 2;
  EXPECT_FALSE(codec1 == codec2);

  // Resetting stream count.
  codec2.numberOfSimulcastStreams = 1;
  // Modify height and width in codec2.
  codec2.simulcastStream[0].width = 640;
  codec2.simulcastStream[0].height = 480;
  EXPECT_FALSE(codec1 == codec2);

  // numberOfTemporalLayers
  SetSimulcastSettings(&codec2);
  codec2.simulcastStream[0].numberOfTemporalLayers = 2;
  EXPECT_FALSE(codec1 == codec2);

  // maxBitrate
  SetSimulcastSettings(&codec2);
  codec2.simulcastStream[0].maxBitrate = 1000;
  EXPECT_FALSE(codec1 == codec2);

  // targetBitrate
  SetSimulcastSettings(&codec2);
  codec2.simulcastStream[0].targetBitrate = 1000;
  EXPECT_FALSE(codec1 == codec2);

  // minBitrate
  SetSimulcastSettings(&codec2);
  codec2.simulcastStream[0].minBitrate = 50;
  EXPECT_FALSE(codec1 == codec2);

  // qpMax
  SetSimulcastSettings(&codec2);
  codec2.simulcastStream[0].qpMax = 100;
  EXPECT_FALSE(codec1 == codec2);
}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_defines.h b/webrtc/video_engine/vie_defines.h
new file mode 100644
index 0000000000..59b56a54fd
--- /dev/null
+++ b/webrtc/video_engine/vie_defines.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_DEFINES_H_
+
+#include "webrtc/engine_configurations.h"
+
+// TODO(mflodman) Remove.
+#ifdef WEBRTC_ANDROID
+#include <arpa/inet.h> // NOLINT
+#include <linux/net.h> // NOLINT
+#include <netinet/in.h> // NOLINT
+#include <pthread.h> // NOLINT
+#include <stdio.h> // NOLINT
+#include <stdlib.h> // NOLINT
+#include <string.h> // NOLINT
+#include <sys/socket.h> // NOLINT
+#include <sys/time.h> // NOLINT
+#include <sys/types.h> // NOLINT
+#include <time.h> // NOLINT
+#endif
+
+namespace webrtc {
+
+// General
+enum { kViEMinKeyRequestIntervalMs = 300 };
+
+// ViEBase
+enum { kViEMaxNumberOfChannels = 64 };
+
+// ViECodec
+enum { kViEMaxCodecWidth = 4096 };
+enum { kViEMaxCodecHeight = 3072 };
+enum { kViEMaxCodecFramerate = 60 };
+enum { kViEMinCodecBitrate = 30 };
+
+// ViENetwork
+enum { kViEMaxMtu = 1500 };
+enum { kViESocketThreads = 1 };
+enum { kViENumReceiveSocketBuffers = 500 };
+
+// ViERender
+// Max valid time set in SetRenderTimeoutImage
+enum { kViEMaxRenderTimeoutTimeMs = 10000 };
+// Min valid time set in SetRenderTimeoutImage
+enum { kViEMinRenderTimeoutTimeMs = 33 };
+enum { kViEDefaultRenderDelayMs = 10 };
+
+// ViERTP_RTCP
+enum { kSendSidePacketHistorySize = 600 };
+
+// NACK
+enum { kMaxPacketAgeToNack = 450 }; // In sequence numbers.
+enum { kMaxNackListSize = 250 };
+
+// Id definitions
+enum {
+ kViEChannelIdBase = 0x0,
+ kViEChannelIdMax = 0xFF,
+ kViEDummyChannelId = 0xFFFF
+};
+
+// Module id
+// Create a unique id based on the ViE instance id and the
+// channel id. ViE id > 0 and 0 <= channel id <= 255
+
+inline int ViEId(const int vieId, const int channelId = -1) {
+ if (channelId == -1) {
+ return static_cast<int>((vieId << 16) + kViEDummyChannelId);
+ }
+ return static_cast<int>((vieId << 16) + channelId);
+}
+
+inline int ViEModuleId(const int vieId, const int channelId = -1) {
+ if (channelId == -1) {
+ return static_cast<int>((vieId << 16) + kViEDummyChannelId);
+ }
+ return static_cast<int>((vieId << 16) + channelId);
+}
+
+inline int ChannelId(const int moduleId) {
+ return static_cast<int>(moduleId & 0xffff);
+}
+
+// Windows specific.
+#if defined(_WIN32)
+ #define RENDER_MODULE_TYPE kRenderWindows
+
+ // Include libraries.
+ #pragma comment(lib, "winmm.lib")
+
+ #ifndef WEBRTC_EXTERNAL_TRANSPORT
+ #pragma comment(lib, "ws2_32.lib")
+ #pragma comment(lib, "Iphlpapi.lib") // _GetAdaptersAddresses
+ #endif
+#endif
+
+// Mac specific.
+#ifdef WEBRTC_MAC
+ #define SLEEP(x) usleep(x * 1000)
+ #define RENDER_MODULE_TYPE kRenderWindows
+#endif
+
+// Android specific.
+#ifdef WEBRTC_ANDROID
+ #define FAR
+ #define __cdecl
+#endif // WEBRTC_ANDROID
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIE_DEFINES_H_
diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc
new file mode 100644
index 0000000000..0f4a5a14f5
--- /dev/null
+++ b/webrtc/video_engine/vie_encoder.cc
@@ -0,0 +1,710 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/vie_encoder.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/common_video/interface/video_image.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/send_statistics_proxy.h"
+#include "webrtc/video_engine/payload_router.h"
+#include "webrtc/video_engine/vie_defines.h"
+
+namespace webrtc {
+
+// Margin on when we pause the encoder when the pacing buffer overflows relative
+// to the configured buffer delay.
+static const float kEncoderPausePacerMargin = 2.0f;
+
+// Don't stop the encoder unless the delay is above this configured value.
+static const int kMinPacingDelayMs = 200;
+
+static const float kStopPaddingThresholdMs = 2000;
+
+std::vector<uint32_t> AllocateStreamBitrates(
+ uint32_t total_bitrate,
+ const SimulcastStream* stream_configs,
+ size_t number_of_streams) {
+ if (number_of_streams == 0) {
+ std::vector<uint32_t> stream_bitrates(1, 0);
+ stream_bitrates[0] = total_bitrate;
+ return stream_bitrates;
+ }
+ std::vector<uint32_t> stream_bitrates(number_of_streams, 0);
+ uint32_t bitrate_remainder = total_bitrate;
+ for (size_t i = 0; i < stream_bitrates.size() && bitrate_remainder > 0; ++i) {
+ if (stream_configs[i].maxBitrate * 1000 > bitrate_remainder) {
+ stream_bitrates[i] = bitrate_remainder;
+ } else {
+ stream_bitrates[i] = stream_configs[i].maxBitrate * 1000;
+ }
+ bitrate_remainder -= stream_bitrates[i];
+ }
+ return stream_bitrates;
+}
+
+class QMVideoSettingsCallback : public VCMQMSettingsCallback {
+ public:
+ explicit QMVideoSettingsCallback(VideoProcessingModule* vpm);
+
+ ~QMVideoSettingsCallback();
+
+ // Update VPM with QM (quality modes: frame size & frame rate) settings.
+ int32_t SetVideoQMSettings(const uint32_t frame_rate,
+ const uint32_t width,
+ const uint32_t height);
+
+ // Update target frame rate.
+ void SetTargetFramerate(int frame_rate);
+
+ private:
+ VideoProcessingModule* vpm_;
+};
+
+class ViEBitrateObserver : public BitrateObserver {
+ public:
+ explicit ViEBitrateObserver(ViEEncoder* owner)
+ : owner_(owner) {
+ }
+ virtual ~ViEBitrateObserver() {}
+ // Implements BitrateObserver.
+ virtual void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t rtt) {
+ owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
+ }
+ private:
+ ViEEncoder* owner_;
+};
+
+ViEEncoder::ViEEncoder(uint32_t number_of_cores,
+ ProcessThread* module_process_thread,
+ SendStatisticsProxy* stats_proxy,
+ I420FrameCallback* pre_encode_callback,
+ PacedSender* pacer,
+ BitrateAllocator* bitrate_allocator)
+ : number_of_cores_(number_of_cores),
+ vpm_(VideoProcessingModule::Create()),
+ qm_callback_(new QMVideoSettingsCallback(vpm_.get())),
+ vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
+ this,
+ qm_callback_.get())),
+ send_payload_router_(NULL),
+ data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ stats_proxy_(stats_proxy),
+ pre_encode_callback_(pre_encode_callback),
+ pacer_(pacer),
+ bitrate_allocator_(bitrate_allocator),
+ time_of_last_frame_activity_ms_(0),
+ simulcast_enabled_(false),
+ min_transmit_bitrate_kbps_(0),
+ last_observed_bitrate_bps_(0),
+ target_delay_ms_(0),
+ network_is_transmitting_(true),
+ encoder_paused_(false),
+ encoder_paused_and_dropped_frame_(false),
+ fec_enabled_(false),
+ nack_enabled_(false),
+ module_process_thread_(module_process_thread),
+ has_received_sli_(false),
+ picture_id_sli_(0),
+ has_received_rpsi_(false),
+ picture_id_rpsi_(0),
+ video_suspended_(false) {
+ bitrate_observer_.reset(new ViEBitrateObserver(this));
+}
+
+bool ViEEncoder::Init() {
+ vpm_->EnableTemporalDecimation(true);
+
+ // Enable/disable content analysis: off by default for now.
+ vpm_->EnableContentAnalysis(false);
+
+ if (vcm_->RegisterTransportCallback(this) != 0) {
+ return false;
+ }
+ if (vcm_->RegisterSendStatisticsCallback(this) != 0) {
+ return false;
+ }
+ return true;
+}
+
+void ViEEncoder::StartThreadsAndSetSharedMembers(
+ rtc::scoped_refptr<PayloadRouter> send_payload_router,
+ VCMProtectionCallback* vcm_protection_callback) {
+ RTC_DCHECK(send_payload_router_ == NULL);
+
+ send_payload_router_ = send_payload_router;
+ vcm_->RegisterProtectionCallback(vcm_protection_callback);
+ module_process_thread_->RegisterModule(vcm_.get());
+}
+
+void ViEEncoder::StopThreadsAndRemoveSharedMembers() {
+ if (bitrate_allocator_)
+ bitrate_allocator_->RemoveBitrateObserver(bitrate_observer_.get());
+ module_process_thread_->DeRegisterModule(vcm_.get());
+ module_process_thread_->DeRegisterModule(vpm_.get());
+}
+
+ViEEncoder::~ViEEncoder() {
+}
+
+void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ network_is_transmitting_ = is_transmitting;
+ }
+}
+
+void ViEEncoder::Pause() {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_paused_ = true;
+}
+
+void ViEEncoder::Restart() {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_paused_ = false;
+}
+
+uint8_t ViEEncoder::NumberOfCodecs() {
+ return vcm_->NumberOfCodecs();
+}
+
+int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
+ if (vcm_->Codec(list_index, video_codec) != 0) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
+ uint8_t pl_type,
+ bool internal_source) {
+ if (encoder == NULL)
+ return -1;
+
+ if (vcm_->RegisterExternalEncoder(encoder, pl_type, internal_source) !=
+ VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
+ if (vcm_->RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+ // Setting target width and height for VPM.
+ if (vpm_->SetTargetResolution(video_codec.width, video_codec.height,
+ video_codec.maxFramerate) != VPM_OK) {
+ return -1;
+ }
+
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ simulcast_enabled_ = video_codec.numberOfSimulcastStreams > 1;
+ }
+
+ // Add a bitrate observer to the allocator and update the start, max and
+ // min bitrates of the bitrate controller as needed.
+ int allocated_bitrate_bps = bitrate_allocator_->AddBitrateObserver(
+ bitrate_observer_.get(), video_codec.minBitrate * 1000,
+ video_codec.maxBitrate * 1000);
+
+ webrtc::VideoCodec modified_video_codec = video_codec;
+ modified_video_codec.startBitrate = allocated_bitrate_bps / 1000;
+
+ size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
+ if (vcm_->RegisterSendCodec(&modified_video_codec, number_of_cores_,
+ static_cast<uint32_t>(max_data_payload_length)) !=
+ VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
+ *video_codec = vcm_->GetSendCodec();
+ return 0;
+}
+
+int32_t ViEEncoder::ScaleInputImage(bool enable) {
+ VideoFrameResampling resampling_mode = kFastRescaling;
+ // TODO(mflodman) What?
+ if (enable) {
+ // kInterpolation is currently not supported.
+ LOG_F(LS_ERROR) << "Not supported.";
+ return -1;
+ }
+ vpm_->SetInputFrameResampleMode(resampling_mode);
+
+ return 0;
+}
+
+int ViEEncoder::GetPaddingNeededBps() const {
+ int64_t time_of_last_frame_activity_ms;
+ int min_transmit_bitrate_bps;
+ int bitrate_bps;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ bool send_padding = simulcast_enabled_ || video_suspended_ ||
+ min_transmit_bitrate_kbps_ > 0;
+ if (!send_padding)
+ return 0;
+ time_of_last_frame_activity_ms = time_of_last_frame_activity_ms_;
+ min_transmit_bitrate_bps = 1000 * min_transmit_bitrate_kbps_;
+ bitrate_bps = last_observed_bitrate_bps_;
+ }
+
+ VideoCodec send_codec;
+ if (vcm_->SendCodec(&send_codec) != 0)
+ return 0;
+
+ bool video_is_suspended = vcm_->VideoSuspended();
+
+ // Find the max amount of padding we can allow ourselves to send at this
+ // point, based on which streams are currently active and what our current
+ // available bandwidth is.
+ int pad_up_to_bitrate_bps = 0;
+ if (send_codec.numberOfSimulcastStreams == 0) {
+ pad_up_to_bitrate_bps = send_codec.minBitrate * 1000;
+ } else {
+ SimulcastStream* stream_configs = send_codec.simulcastStream;
+ pad_up_to_bitrate_bps =
+ stream_configs[send_codec.numberOfSimulcastStreams - 1].minBitrate *
+ 1000;
+ for (int i = 0; i < send_codec.numberOfSimulcastStreams - 1; ++i) {
+ pad_up_to_bitrate_bps += stream_configs[i].targetBitrate * 1000;
+ }
+ }
+
+ // Disable padding if only sending one stream and video isn't suspended and
+ // min-transmit bitrate isn't used (applied later).
+ if (!video_is_suspended && send_codec.numberOfSimulcastStreams <= 1)
+ pad_up_to_bitrate_bps = 0;
+
+ // The amount of padding should decay to zero if no frames are being
+ // captured/encoded unless a min-transmit bitrate is used.
+ int64_t now_ms = TickTime::MillisecondTimestamp();
+ if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
+ pad_up_to_bitrate_bps = 0;
+
+ // Pad up to min bitrate.
+ if (pad_up_to_bitrate_bps < min_transmit_bitrate_bps)
+ pad_up_to_bitrate_bps = min_transmit_bitrate_bps;
+
+ // Padding may never exceed bitrate estimate.
+ if (pad_up_to_bitrate_bps > bitrate_bps)
+ pad_up_to_bitrate_bps = bitrate_bps;
+
+ return pad_up_to_bitrate_bps;
+}
+
+bool ViEEncoder::EncoderPaused() const {
+ // Pause video if paused by caller or as long as the network is down or the
+ // pacer queue has grown too large in buffered mode.
+ if (encoder_paused_) {
+ return true;
+ }
+ if (target_delay_ms_ > 0) {
+ // Buffered mode.
+ // TODO(pwestin): Workaround until nack is configured as a time and not
+ // number of packets.
+ return pacer_->QueueInMs() >=
+ std::max(
+ static_cast<int>(target_delay_ms_ * kEncoderPausePacerMargin),
+ kMinPacingDelayMs);
+ }
+ if (pacer_->ExpectedQueueTimeMs() > PacedSender::kDefaultMaxQueueLengthMs) {
+ // Too much data in pacer queue, drop frame.
+ return true;
+ }
+ return !network_is_transmitting_;
+}
+
+void ViEEncoder::TraceFrameDropStart() {
+ // Start trace event only on the first frame after encoder is paused.
+ if (!encoder_paused_and_dropped_frame_) {
+ TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
+ }
+ encoder_paused_and_dropped_frame_ = true;
+ return;
+}
+
+void ViEEncoder::TraceFrameDropEnd() {
+ // End trace event on first frame after encoder resumes, if frame was dropped.
+ if (encoder_paused_and_dropped_frame_) {
+ TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
+ }
+ encoder_paused_and_dropped_frame_ = false;
+}
+
+void ViEEncoder::DeliverFrame(VideoFrame video_frame) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+ if (!send_payload_router_->active()) {
+ // We've paused or we have no channels attached, don't waste resources on
+ // encoding.
+ return;
+ }
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ if (EncoderPaused()) {
+ TraceFrameDropStart();
+ return;
+ }
+ TraceFrameDropEnd();
+ }
+
+ TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
+ "Encode");
+ VideoFrame* decimated_frame = NULL;
+ // TODO(wuchengli): support texture frames.
+ if (video_frame.native_handle() == NULL) {
+ // Pass frame via preprocessor.
+ const int ret = vpm_->PreprocessFrame(video_frame, &decimated_frame);
+ if (ret == 1) {
+ // Drop this frame.
+ return;
+ }
+ if (ret != VPM_OK) {
+ return;
+ }
+ }
+
+ // If we haven't resampled the frame and we have a FrameCallback, we need to
+ // make a deep copy of |video_frame|.
+ VideoFrame copied_frame;
+ if (pre_encode_callback_) {
+ // If the frame was not resampled or scaled => use copy of original.
+ if (decimated_frame == NULL) {
+ copied_frame.CopyFrame(video_frame);
+ decimated_frame = &copied_frame;
+ }
+ pre_encode_callback_->FrameCallback(decimated_frame);
+ }
+
+ // If the frame was not resampled, scaled, or touched by FrameCallback => use
+ // original. The frame is const from here.
+ const VideoFrame* output_frame =
+ (decimated_frame != NULL) ? decimated_frame : &video_frame;
+
+#ifdef VIDEOCODEC_VP8
+ if (vcm_->SendCodec() == webrtc::kVideoCodecVP8) {
+ webrtc::CodecSpecificInfo codec_specific_info;
+ codec_specific_info.codecType = webrtc::kVideoCodecVP8;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
+ has_received_rpsi_;
+ codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
+ has_received_sli_;
+ codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
+ picture_id_rpsi_;
+ codec_specific_info.codecSpecific.VP8.pictureIdSLI =
+ picture_id_sli_;
+ has_received_sli_ = false;
+ has_received_rpsi_ = false;
+ }
+
+ vcm_->AddVideoFrame(*output_frame, vpm_->ContentMetrics(),
+ &codec_specific_info);
+ return;
+ }
+#endif
+ vcm_->AddVideoFrame(*output_frame);
+}
+
+int ViEEncoder::SendKeyFrame() {
+ return vcm_->IntraFrameRequest(0);
+}
+
+uint32_t ViEEncoder::LastObservedBitrateBps() const {
+ CriticalSectionScoped cs(data_cs_.get());
+ return last_observed_bitrate_bps_;
+}
+
+int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
+ if (vcm_->Bitrate(bitrate) != 0)
+ return -1;
+ return 0;
+}
+
+int32_t ViEEncoder::UpdateProtectionMethod(bool nack, bool fec) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+
+ if (fec_enabled_ == fec && nack_enabled_ == nack) {
+ // No change needed, we're already in correct state.
+ return 0;
+ }
+ fec_enabled_ = fec;
+ nack_enabled_ = nack;
+
+ // Set Video Protection for VCM.
+ VCMVideoProtection protection_mode;
+ if (fec_enabled_) {
+ protection_mode =
+ nack_enabled_ ? webrtc::kProtectionNackFEC : kProtectionFEC;
+ } else {
+ protection_mode = nack_enabled_ ? kProtectionNack : kProtectionNone;
+ }
+ vcm_->SetVideoProtection(protection_mode, true);
+
+ if (fec_enabled_ || nack_enabled_) {
+ // The send codec must be registered to set correct MTU.
+ webrtc::VideoCodec codec;
+ if (vcm_->SendCodec(&codec) == 0) {
+ uint32_t current_bitrate_bps = 0;
+ if (vcm_->Bitrate(&current_bitrate_bps) != 0) {
+ LOG_F(LS_WARNING) <<
+ "Failed to get the current encoder target bitrate.";
+ }
+ // Convert to start bitrate in kbps.
+ codec.startBitrate = (current_bitrate_bps + 500) / 1000;
+ size_t max_payload_length = send_payload_router_->MaxPayloadLength();
+ if (vcm_->RegisterSendCodec(&codec, number_of_cores_,
+ static_cast<uint32_t>(max_payload_length)) !=
+ 0) {
+ return -1;
+ }
+ }
+ }
+ return 0;
+}
+
+void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ target_delay_ms_ = target_delay_ms;
+ }
+ if (target_delay_ms > 0) {
+ // Disable external frame-droppers.
+ vcm_->EnableFrameDropper(false);
+ vpm_->EnableTemporalDecimation(false);
+ } else {
+ // Real-time mode - enable frame droppers.
+ vpm_->EnableTemporalDecimation(true);
+ vcm_->EnableFrameDropper(true);
+ }
+}
+
+void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
+ if (stats_proxy_)
+ stats_proxy_->OnSetRates(bitrate_bps, framerate);
+}
+
+int32_t ViEEncoder::SendData(
+ const uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const webrtc::RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ }
+
+ if (stats_proxy_ != NULL)
+ stats_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
+
+ return send_payload_router_->RoutePayload(
+ encoded_image._frameType, payload_type, encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, encoded_image._buffer,
+ encoded_image._length, &fragmentation_header, rtp_video_hdr)
+ ? 0
+ : -1;
+}
+
+int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
+ const uint32_t frame_rate) {
+ if (stats_proxy_)
+ stats_proxy_->OnOutgoingRate(frame_rate, bit_rate);
+ return 0;
+}
+
+void ViEEncoder::OnReceivedSLI(uint32_t /*ssrc*/,
+ uint8_t picture_id) {
+ CriticalSectionScoped cs(data_cs_.get());
+ picture_id_sli_ = picture_id;
+ has_received_sli_ = true;
+}
+
+void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
+ uint64_t picture_id) {
+ CriticalSectionScoped cs(data_cs_.get());
+ picture_id_rpsi_ = picture_id;
+ has_received_rpsi_ = true;
+}
+
+void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ // Key frame request from remote side, signal to VCM.
+ TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
+
+ int idx = 0;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ auto stream_it = ssrc_streams_.find(ssrc);
+ if (stream_it == ssrc_streams_.end()) {
+ LOG_F(LS_WARNING) << "ssrc not found: " << ssrc << ", map size "
+ << ssrc_streams_.size();
+ return;
+ }
+ std::map<unsigned int, int64_t>::iterator time_it =
+ time_last_intra_request_ms_.find(ssrc);
+ if (time_it == time_last_intra_request_ms_.end()) {
+ time_last_intra_request_ms_[ssrc] = 0;
+ }
+
+ int64_t now = TickTime::MillisecondTimestamp();
+ if (time_last_intra_request_ms_[ssrc] + kViEMinKeyRequestIntervalMs > now) {
+ return;
+ }
+ time_last_intra_request_ms_[ssrc] = now;
+ idx = stream_it->second;
+ }
+ // Release the critsect before triggering key frame.
+ vcm_->IntraFrameRequest(idx);
+}
+
+void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+ CriticalSectionScoped cs(data_cs_.get());
+ std::map<unsigned int, int>::iterator it = ssrc_streams_.find(old_ssrc);
+ if (it == ssrc_streams_.end()) {
+ return;
+ }
+
+ ssrc_streams_[new_ssrc] = it->second;
+ ssrc_streams_.erase(it);
+
+ std::map<unsigned int, int64_t>::iterator time_it =
+ time_last_intra_request_ms_.find(old_ssrc);
+ int64_t last_intra_request_ms = 0;
+ if (time_it != time_last_intra_request_ms_.end()) {
+ last_intra_request_ms = time_it->second;
+ time_last_intra_request_ms_.erase(time_it);
+ }
+ time_last_intra_request_ms_[new_ssrc] = last_intra_request_ms;
+}
+
+bool ViEEncoder::SetSsrcs(const std::vector<uint32_t>& ssrcs) {
+ VideoCodec codec;
+ if (vcm_->SendCodec(&codec) != 0)
+ return false;
+
+ if (codec.numberOfSimulcastStreams > 0 &&
+ ssrcs.size() != codec.numberOfSimulcastStreams) {
+ return false;
+ }
+
+ CriticalSectionScoped cs(data_cs_.get());
+ ssrc_streams_.clear();
+ time_last_intra_request_ms_.clear();
+ int idx = 0;
+ for (uint32_t ssrc : ssrcs) {
+ ssrc_streams_[ssrc] = idx++;
+ }
+ return true;
+}
+
+void ViEEncoder::SetMinTransmitBitrate(int min_transmit_bitrate_kbps) {
+ assert(min_transmit_bitrate_kbps >= 0);
+ CriticalSectionScoped crit(data_cs_.get());
+ min_transmit_bitrate_kbps_ = min_transmit_bitrate_kbps;
+}
+
+// Called from ViEBitrateObserver.
+void ViEEncoder::OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms) {
+ LOG(LS_VERBOSE) << "OnNetworkChanged, bitrate" << bitrate_bps
+ << " packet loss " << static_cast<int>(fraction_lost)
+ << " rtt " << round_trip_time_ms;
+ RTC_DCHECK(send_payload_router_ != NULL);
+ vcm_->SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
+ bool video_is_suspended = vcm_->VideoSuspended();
+
+ VideoCodec send_codec;
+ if (vcm_->SendCodec(&send_codec) != 0) {
+ return;
+ }
+ SimulcastStream* stream_configs = send_codec.simulcastStream;
+ // Allocate the bandwidth between the streams.
+ std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates(
+ bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams);
+ send_payload_router_->SetTargetSendBitrates(stream_bitrates);
+
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ last_observed_bitrate_bps_ = bitrate_bps;
+ if (video_suspended_ == video_is_suspended)
+ return;
+ video_suspended_ = video_is_suspended;
+
+ LOG(LS_INFO) << "Video suspend state changed " << video_is_suspended
+ << " for ssrc " << ssrc_streams_.begin()->first;
+ }
+ // Video suspend-state changed, inform codec observer.
+ if (stats_proxy_)
+ stats_proxy_->OnSuspendChange(video_is_suspended);
+}
+
+void ViEEncoder::SuspendBelowMinBitrate() {
+ vcm_->SuspendBelowMinBitrate();
+ bitrate_allocator_->EnforceMinBitrate(false);
+}
+
+void ViEEncoder::RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback) {
+ vcm_->RegisterPostEncodeImageCallback(post_encode_callback);
+}
+
+QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessingModule* vpm)
+ : vpm_(vpm) {
+}
+
+QMVideoSettingsCallback::~QMVideoSettingsCallback() {
+}
+
+int32_t QMVideoSettingsCallback::SetVideoQMSettings(
+ const uint32_t frame_rate,
+ const uint32_t width,
+ const uint32_t height) {
+ return vpm_->SetTargetResolution(width, height, frame_rate);
+}
+
+void QMVideoSettingsCallback::SetTargetFramerate(int frame_rate) {
+ vpm_->SetTargetFramerate(frame_rate);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_encoder.h b/webrtc/video_engine/vie_encoder.h
new file mode 100644
index 0000000000..54aacdbfa9
--- /dev/null
+++ b/webrtc/video_engine/vie_encoder.h
@@ -0,0 +1,201 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_allocator.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video/video_capture_input.h"
+#include "webrtc/video_engine/vie_defines.h"
+
+namespace webrtc {
+
+class Config;
+class CriticalSectionWrapper;
+class EncodedImageCallback;
+class PacedSender;
+class PayloadRouter;
+class ProcessThread;
+class QMVideoSettingsCallback;
+class SendStatisticsProxy;
+class ViEBitrateObserver;
+class ViEEffectFilter;
+class VideoCodingModule;
+
+class ViEEncoder : public RtcpIntraFrameObserver,
+ public VideoEncoderRateObserver,
+ public VCMPacketizationCallback,
+ public VCMSendStatisticsCallback,
+ public VideoCaptureCallback {
+ public:
+ friend class ViEBitrateObserver;
+
+ ViEEncoder(uint32_t number_of_cores,
+ ProcessThread* module_process_thread,
+ SendStatisticsProxy* stats_proxy,
+ I420FrameCallback* pre_encode_callback,
+ PacedSender* pacer,
+ BitrateAllocator* bitrate_allocator);
+ ~ViEEncoder();
+
+ bool Init();
+
+ // This function is assumed to be called before any frames are delivered and
+ // only once.
+ // Ideally this would be done in Init, but the dependencies between ViEEncoder
+ // and ViEChannel makes it really hard to do in a good way.
+ void StartThreadsAndSetSharedMembers(
+ rtc::scoped_refptr<PayloadRouter> send_payload_router,
+ VCMProtectionCallback* vcm_protection_callback);
+
+ // This function must be called before the corresponding ViEChannel is
+ // deleted.
+ void StopThreadsAndRemoveSharedMembers();
+
+ void SetNetworkTransmissionState(bool is_transmitting);
+
+ // Returns the id of the owning channel.
+ int Owner() const;
+
+ // Drops incoming packets before they get to the encoder.
+ void Pause();
+ void Restart();
+
+ // Codec settings.
+ uint8_t NumberOfCodecs();
+ int32_t GetCodec(uint8_t list_index, VideoCodec* video_codec);
+ int32_t RegisterExternalEncoder(VideoEncoder* encoder,
+ uint8_t pl_type,
+ bool internal_source);
+ int32_t DeRegisterExternalEncoder(uint8_t pl_type);
+ int32_t SetEncoder(const VideoCodec& video_codec);
+ int32_t GetEncoder(VideoCodec* video_codec);
+
+ // Scale or crop/pad image.
+ int32_t ScaleInputImage(bool enable);
+
+ // Implementing VideoCaptureCallback.
+ void DeliverFrame(VideoFrame video_frame) override;
+
+ int32_t SendKeyFrame();
+
+ uint32_t LastObservedBitrateBps() const;
+ int CodecTargetBitrate(uint32_t* bitrate) const;
+ // Loss protection.
+ int32_t UpdateProtectionMethod(bool nack, bool fec);
+ bool nack_enabled() const { return nack_enabled_; }
+
+ // Buffering mode.
+ void SetSenderBufferingMode(int target_delay_ms);
+
+ // Implements VideoEncoderRateObserver.
+ void OnSetRates(uint32_t bitrate_bps, int framerate) override;
+
+ // Implements VCMPacketizationCallback.
+ int32_t SendData(uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) override;
+
+ // Implements VideoSendStatisticsCallback.
+ int32_t SendStatistics(const uint32_t bit_rate,
+ const uint32_t frame_rate) override;
+
+ // Implements RtcpIntraFrameObserver.
+ void OnReceivedIntraFrameRequest(uint32_t ssrc) override;
+ void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) override;
+ void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) override;
+ void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) override;
+
+ // Sets SSRCs for all streams.
+ bool SetSsrcs(const std::vector<uint32_t>& ssrcs);
+
+ void SetMinTransmitBitrate(int min_transmit_bitrate_kbps);
+
+ // Lets the sender suspend video when the rate drops below
+ // |threshold_bps|, and turns back on when the rate goes back up above
+ // |threshold_bps| + |window_bps|.
+ void SuspendBelowMinBitrate();
+
+ // New-style callbacks, used by VideoSendStream.
+ void RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback);
+
+ int GetPaddingNeededBps() const;
+
+ protected:
+ // Called by BitrateObserver.
+ void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms);
+
+ private:
+ bool EncoderPaused() const EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+ void TraceFrameDropStart() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+ void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+
+ const uint32_t number_of_cores_;
+
+ const rtc::scoped_ptr<VideoProcessingModule> vpm_;
+ const rtc::scoped_ptr<QMVideoSettingsCallback> qm_callback_;
+ const rtc::scoped_ptr<VideoCodingModule> vcm_;
+ rtc::scoped_refptr<PayloadRouter> send_payload_router_;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
+ rtc::scoped_ptr<BitrateObserver> bitrate_observer_;
+
+ SendStatisticsProxy* const stats_proxy_;
+ I420FrameCallback* const pre_encode_callback_;
+ PacedSender* const pacer_;
+ BitrateAllocator* const bitrate_allocator_;
+
+ // The time we last received an input frame or encoded frame. This is used to
+ // track when video is stopped long enough that we also want to stop sending
+ // padding.
+ int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_);
+ bool simulcast_enabled_ GUARDED_BY(data_cs_);
+ int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
+ uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);
+ int target_delay_ms_ GUARDED_BY(data_cs_);
+ bool network_is_transmitting_ GUARDED_BY(data_cs_);
+ bool encoder_paused_ GUARDED_BY(data_cs_);
+ bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_);
+ std::map<unsigned int, int64_t> time_last_intra_request_ms_
+ GUARDED_BY(data_cs_);
+
+ bool fec_enabled_;
+ bool nack_enabled_;
+
+ ProcessThread* module_process_thread_;
+
+ bool has_received_sli_ GUARDED_BY(data_cs_);
+ uint8_t picture_id_sli_ GUARDED_BY(data_cs_);
+ bool has_received_rpsi_ GUARDED_BY(data_cs_);
+ uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_);
+ std::map<uint32_t, int> ssrc_streams_ GUARDED_BY(data_cs_);
+
+ bool video_suspended_ GUARDED_BY(data_cs_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
diff --git a/webrtc/video_engine/vie_receiver.cc b/webrtc/video_engine/vie_receiver.cc
new file mode 100644
index 0000000000..2e3b588302
--- /dev/null
+++ b/webrtc/video_engine/vie_receiver.cc
@@ -0,0 +1,482 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/vie_receiver.h"
+
+#include <vector>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/interface/fec_receiver.h"
+#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+// Minimum interval between logging the header of an incoming RTP packet.
+static const int kPacketLogIntervalMs = 10000;
+
+// Wires up the RTP receive pipeline: header parser, payload registry, RTP
+// receiver, receive statistics and FEC receiver (all owned). |module_vcm|
+// and |remote_bitrate_estimator| are borrowed, not owned;
+// |remote_bitrate_estimator| must be non-null.
+ViEReceiver::ViEReceiver(VideoCodingModule* module_vcm,
+                         RemoteBitrateEstimator* remote_bitrate_estimator,
+                         RtpFeedback* rtp_feedback)
+    : receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      clock_(Clock::GetRealTimeClock()),
+      rtp_header_parser_(RtpHeaderParser::Create()),
+      rtp_payload_registry_(
+          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
+      rtp_receiver_(
+          RtpReceiver::CreateVideoReceiver(clock_,
+                                           this,
+                                           rtp_feedback,
+                                           rtp_payload_registry_.get())),
+      rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
+      fec_receiver_(FecReceiver::Create(this)),
+      rtp_rtcp_(NULL),
+      vcm_(module_vcm),
+      remote_bitrate_estimator_(remote_bitrate_estimator),
+      ntp_estimator_(new RemoteNtpTimeEstimator(clock_)),
+      receiving_(false),
+      restored_packet_in_use_(false),
+      receiving_ast_enabled_(false),
+      receiving_cvo_enabled_(false),
+      receiving_tsn_enabled_(false),
+      last_packet_log_ms_(-1) {
+  assert(remote_bitrate_estimator);
+}
+
+// Reports lifetime FEC histograms before the owned members are destroyed.
+ViEReceiver::~ViEReceiver() {
+  UpdateHistograms();
+}
+
+// Logs UMA histograms for the fraction of received packets that were FEC,
+// and the fraction of FEC packets that recovered media packets. Each
+// histogram is skipped when its denominator is zero.
+void ViEReceiver::UpdateHistograms() {
+  FecPacketCounter counter = fec_receiver_->GetPacketCounter();
+  if (counter.num_packets > 0) {
+    RTC_HISTOGRAM_PERCENTAGE(
+        "WebRTC.Video.ReceivedFecPacketsInPercent",
+        static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
+  }
+  if (counter.num_fec_packets > 0) {
+    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
+                             static_cast<int>(counter.num_recovered_packets *
+                                              100 / counter.num_fec_packets));
+  }
+}
+
+// Registers |video_codec| as a receive payload. If an equivalent payload is
+// already registered under a different payload type, that old mapping is
+// de-registered first so the new type takes effect. Returns true on success.
+bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
+  int8_t old_pltype = -1;
+  if (rtp_payload_registry_->ReceivePayloadType(video_codec.plName,
+                                                kVideoPayloadTypeFrequency,
+                                                0,
+                                                video_codec.maxBitrate,
+                                                &old_pltype) != -1) {
+    rtp_payload_registry_->DeRegisterReceivePayload(old_pltype);
+  }
+
+  return RegisterPayload(video_codec);
+}
+
+// Registers the codec with the RTP receiver at the fixed 90 kHz video clock
+// rate. Returns true if the underlying registration succeeded (returned 0).
+bool ViEReceiver::RegisterPayload(const VideoCodec& video_codec) {
+  return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
+                                               video_codec.plType,
+                                               kVideoPayloadTypeFrequency,
+                                               0,
+                                               video_codec.maxBitrate) == 0;
+}
+
+// Enables or disables NACK-based retransmission requests. When enabled,
+// |max_nack_reordering_threshold| controls how much reordering is tolerated
+// before a packet is considered lost; when disabled the default threshold is
+// restored since no retransmissions will arrive.
+void ViEReceiver::SetNackStatus(bool enable,
+                                int max_nack_reordering_threshold) {
+  if (!enable) {
+    // Reset the threshold back to the lower default threshold when NACK is
+    // disabled since we no longer will be receiving retransmissions.
+    max_nack_reordering_threshold = kDefaultMaxReorderingThreshold;
+  }
+  rtp_receive_statistics_->SetMaxReorderingThreshold(
+      max_nack_reordering_threshold);
+  rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
+}
+
+// Maps an RTX payload type to the media payload type it retransmits.
+void ViEReceiver::SetRtxPayloadType(int payload_type,
+                                    int associated_payload_type) {
+  rtp_payload_registry_->SetRtxPayloadType(payload_type,
+                                           associated_payload_type);
+}
+
+// See the header: when true, restored RTX packets use the explicit payload
+// mapping instead of the last received non-RTX payload type.
+void ViEReceiver::SetUseRtxPayloadMappingOnRestore(bool val) {
+  rtp_payload_registry_->set_use_rtx_payload_mapping_on_restore(val);
+}
+
+// Sets the SSRC expected on the RTX retransmission stream.
+void ViEReceiver::SetRtxSsrc(uint32_t ssrc) {
+  rtp_payload_registry_->SetRtxSsrc(ssrc);
+}
+
+// Fetches the configured RTX SSRC; returns false if none is set.
+bool ViEReceiver::GetRtxSsrc(uint32_t* ssrc) const {
+  return rtp_payload_registry_->GetRtxSsrc(ssrc);
+}
+
+// FEC is considered enabled once a ULPFEC payload type has been registered.
+bool ViEReceiver::IsFecEnabled() const {
+  return rtp_payload_registry_->ulpfec_payload_type() > -1;
+}
+
+uint32_t ViEReceiver::GetRemoteSsrc() const {
+  return rtp_receiver_->SSRC();
+}
+
+int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
+  return rtp_receiver_->CSRCs(csrcs);
+}
+
+// Installs the base RTP/RTCP module; not owned. Must be set by the owner
+// before RTCP packets are inserted (see InsertRTCPPacket).
+void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
+  rtp_rtcp_ = module;
+}
+
+RtpReceiver* ViEReceiver::GetRtpReceiver() const {
+  return rtp_receiver_.get();
+}
+
+// Replaces the set of simulcast RTP/RTCP modules (all but the first entry of
+// |rtp_modules|). The first module is the base module, handled separately via
+// SetRtpRtcpModule. Modules are borrowed, not owned.
+void ViEReceiver::RegisterRtpRtcpModules(
+    const std::vector<RtpRtcp*>& rtp_modules) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  // Only change the "simulcast" modules, the base module can be accessed
+  // without a lock whereas the simulcast modules require locking as they can be
+  // changed in runtime.
+  rtp_rtcp_simulcast_ =
+      std::vector<RtpRtcp*>(rtp_modules.begin() + 1, rtp_modules.end());
+}
+
+// Each of the four functions below registers or de-registers one RTP header
+// extension on the parser, keyed by extension |id|. Registration failures
+// return false without changing the corresponding enabled flag.
+
+// Transmission time offset (toffset) extension.
+bool ViEReceiver::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+  if (enable) {
+    return rtp_header_parser_->RegisterRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset, id);
+  } else {
+    return rtp_header_parser_->DeregisterRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset);
+  }
+}
+
+// Absolute send time extension; tracks state in |receiving_ast_enabled_|.
+bool ViEReceiver::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
+  if (enable) {
+    if (rtp_header_parser_->RegisterRtpHeaderExtension(
+        kRtpExtensionAbsoluteSendTime, id)) {
+      receiving_ast_enabled_ = true;
+      return true;
+    } else {
+      return false;
+    }
+  } else {
+    receiving_ast_enabled_ = false;
+    return rtp_header_parser_->DeregisterRtpHeaderExtension(
+        kRtpExtensionAbsoluteSendTime);
+  }
+}
+
+// Video rotation (CVO) extension; tracks state in |receiving_cvo_enabled_|.
+bool ViEReceiver::SetReceiveVideoRotationStatus(bool enable, int id) {
+  if (enable) {
+    if (rtp_header_parser_->RegisterRtpHeaderExtension(
+            kRtpExtensionVideoRotation, id)) {
+      receiving_cvo_enabled_ = true;
+      return true;
+    } else {
+      return false;
+    }
+  } else {
+    receiving_cvo_enabled_ = false;
+    return rtp_header_parser_->DeregisterRtpHeaderExtension(
+        kRtpExtensionVideoRotation);
+  }
+}
+
+// Transport-wide sequence number extension; tracked in
+// |receiving_tsn_enabled_|.
+bool ViEReceiver::SetReceiveTransportSequenceNumber(bool enable, int id) {
+  if (enable) {
+    if (rtp_header_parser_->RegisterRtpHeaderExtension(
+            kRtpExtensionTransportSequenceNumber, id)) {
+      receiving_tsn_enabled_ = true;
+      return true;
+    } else {
+      return false;
+    }
+  } else {
+    receiving_tsn_enabled_ = false;
+    return rtp_header_parser_->DeregisterRtpHeaderExtension(
+        kRtpExtensionTransportSequenceNumber);
+  }
+}
+
+// External-transport entry point for RTP; forwards to InsertRTPPacket.
+// Returns 0 on success, -1 on failure.
+int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
+                                   size_t rtp_packet_length,
+                                   const PacketTime& packet_time) {
+  return InsertRTPPacket(static_cast<const uint8_t*>(rtp_packet),
+                         rtp_packet_length, packet_time);
+}
+
+// External-transport entry point for RTCP; forwards to InsertRTCPPacket.
+int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
+                                    size_t rtcp_packet_length) {
+  return InsertRTCPPacket(static_cast<const uint8_t*>(rtcp_packet),
+                         rtcp_packet_length);
+}
+
+// RtpData implementation: hands depacketized payload to the VCM, after
+// stamping the header with an estimated NTP capture time derived from the
+// RTP timestamp. Returns 0 on success, -1 if the VCM rejected the packet.
+int32_t ViEReceiver::OnReceivedPayloadData(const uint8_t* payload_data,
+                                           const size_t payload_size,
+                                           const WebRtcRTPHeader* rtp_header) {
+  WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
+  rtp_header_with_ntp.ntp_time_ms =
+      ntp_estimator_->Estimate(rtp_header->header.timestamp);
+  if (vcm_->IncomingPacket(payload_data,
+                           payload_size,
+                           rtp_header_with_ntp) != 0) {
+    // TODO: Check whether a VCM error here needs handling beyond
+    // propagating -1 to the caller.
+    return -1;
+  }
+  return 0;
+}
+
+// RtpData implementation: re-injects a packet recovered by FEC (or restored
+// from RTX) into the normal receive path. Returns false if the RTP header
+// cannot be parsed.
+bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
+                                    size_t rtp_packet_length) {
+  RTPHeader header;
+  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
+    return false;
+  }
+  header.payload_type_frequency = kVideoPayloadTypeFrequency;
+  bool in_order = IsPacketInOrder(header);
+  return ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
+}
+
+// Full RTP ingest path: drops packets while not receiving, parses the
+// header, feeds the remote bitrate estimator, dispatches the packet, and
+// finally updates receive statistics. Returns 0 on success, -1 on failure.
+int ViEReceiver::InsertRTPPacket(const uint8_t* rtp_packet,
+                                 size_t rtp_packet_length,
+                                 const PacketTime& packet_time) {
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+    if (!receiving_) {
+      return -1;
+    }
+  }
+
+  RTPHeader header;
+  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
+                                 &header)) {
+    return -1;
+  }
+  size_t payload_length = rtp_packet_length - header.headerLength;
+  int64_t arrival_time_ms;
+  int64_t now_ms = clock_->TimeInMilliseconds();
+  // NOTE(review): packet_time.timestamp appears to be in microseconds; the
+  // +500 rounds to the nearest millisecond — confirm against PacketTime.
+  if (packet_time.timestamp != -1)
+    arrival_time_ms = (packet_time.timestamp + 500) / 1000;
+  else
+    arrival_time_ms = now_ms;
+
+  {
+    // Periodically log the RTP header of incoming packets.
+    CriticalSectionScoped cs(receive_cs_.get());
+    if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
+      std::stringstream ss;
+      ss << "Packet received on SSRC: " << header.ssrc << " with payload type: "
+         << static_cast<int>(header.payloadType) << ", timestamp: "
+         << header.timestamp << ", sequence number: " << header.sequenceNumber
+         << ", arrival time: " << arrival_time_ms;
+      if (header.extension.hasTransmissionTimeOffset)
+        ss << ", toffset: " << header.extension.transmissionTimeOffset;
+      if (header.extension.hasAbsoluteSendTime)
+        ss << ", abs send time: " << header.extension.absoluteSendTime;
+      LOG(LS_INFO) << ss.str();
+      last_packet_log_ms_ = now_ms;
+    }
+  }
+
+  remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_length,
+                                            header, true);
+  header.payload_type_frequency = kVideoPayloadTypeFrequency;
+
+  bool in_order = IsPacketInOrder(header);
+  rtp_payload_registry_->SetIncomingPayloadType(header);
+  int ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order)
+      ? 0
+      : -1;
+  // Update receive statistics after ReceivePacket.
+  // Receive statistics will be reset if the payload type changes (make sure
+  // that the first packet is included in the stats).
+  rtp_receive_statistics_->IncomingPacket(
+      header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
+  return ret;
+}
+
+// Routes a parsed packet: encapsulated packets (RED/RTX) go through
+// ParseAndHandleEncapsulatingHeader; plain media packets are handed to the
+// RTP receiver with their payload-type-specific info. Returns false if the
+// payload type is unknown or the downstream handler fails.
+bool ViEReceiver::ReceivePacket(const uint8_t* packet,
+                                size_t packet_length,
+                                const RTPHeader& header,
+                                bool in_order) {
+  if (rtp_payload_registry_->IsEncapsulated(header)) {
+    return ParseAndHandleEncapsulatingHeader(packet, packet_length, header);
+  }
+  const uint8_t* payload = packet + header.headerLength;
+  assert(packet_length >= header.headerLength);
+  size_t payload_length = packet_length - header.headerLength;
+  PayloadUnion payload_specific;
+  if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
+                                                  &payload_specific)) {
+    return false;
+  }
+  return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
+                                          payload_specific, in_order);
+}
+
+// Handles RED (FEC) and RTX encapsulation. RED packets are fed to the FEC
+// receiver; RTX packets have their original RTP packet restored into
+// |restored_packet_| and re-injected via OnRecoveredPacket. Assumed to be
+// called from a single thread (see header). Returns false for packets that
+// are neither RED nor RTX, or on any parse/restore failure.
+bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
+                                                    size_t packet_length,
+                                                    const RTPHeader& header) {
+  if (rtp_payload_registry_->IsRed(header)) {
+    int8_t ulpfec_pt = rtp_payload_registry_->ulpfec_payload_type();
+    if (packet[header.headerLength] == ulpfec_pt) {
+      rtp_receive_statistics_->FecPacketReceived(header, packet_length);
+      // Notify vcm about received FEC packets to avoid NACKing these packets.
+      NotifyReceiverOfFecPacket(header);
+    }
+    if (fec_receiver_->AddReceivedRedPacket(
+            header, packet, packet_length, ulpfec_pt) != 0) {
+      return false;
+    }
+    return fec_receiver_->ProcessReceivedFec() == 0;
+  } else if (rtp_payload_registry_->IsRtx(header)) {
+    if (header.headerLength + header.paddingLength == packet_length) {
+      // This is an empty packet and should be silently dropped before trying to
+      // parse the RTX header.
+      return true;
+    }
+    // Remove the RTX header and parse the original RTP header.
+    if (packet_length < header.headerLength)
+      return false;
+    // Restored packet must fit in the fixed-size |restored_packet_| buffer.
+    if (packet_length > sizeof(restored_packet_))
+      return false;
+    CriticalSectionScoped cs(receive_cs_.get());
+    // |restored_packet_in_use_| guards against re-entrant RTX-in-RTX
+    // restoration through OnRecoveredPacket.
+    if (restored_packet_in_use_) {
+      LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
+      return false;
+    }
+    if (!rtp_payload_registry_->RestoreOriginalPacket(
+            restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
+            header)) {
+      LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header";
+      return false;
+    }
+    restored_packet_in_use_ = true;
+    bool ret = OnRecoveredPacket(restored_packet_, packet_length);
+    restored_packet_in_use_ = false;
+    return ret;
+  }
+  return false;
+}
+
+// Delivers a zero-length "media" packet to the VCM for a received FEC packet
+// so the jitter buffer sees the sequence number and does not NACK it. Uses
+// the last received media payload type since the FEC packet itself carries
+// the RED/FEC type. Silently returns if no media packet has been seen yet or
+// the payload type is unknown.
+void ViEReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
+  int8_t last_media_payload_type =
+      rtp_payload_registry_->last_received_media_payload_type();
+  if (last_media_payload_type < 0) {
+    LOG(LS_WARNING) << "Failed to get last media payload type.";
+    return;
+  }
+  // Fake an empty media packet.
+  WebRtcRTPHeader rtp_header = {};
+  rtp_header.header = header;
+  rtp_header.header.payloadType = last_media_payload_type;
+  rtp_header.header.paddingLength = 0;
+  PayloadUnion payload_specific;
+  if (!rtp_payload_registry_->GetPayloadSpecifics(last_media_payload_type,
+                                                  &payload_specific)) {
+    LOG(LS_WARNING) << "Failed to get payload specifics.";
+    return;
+  }
+  rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
+  rtp_header.type.Video.rotation = kVideoRotation_0;
+  if (header.extension.hasVideoRotation) {
+    rtp_header.type.Video.rotation =
+        ConvertCVOByteToVideoRotation(header.extension.videoRotation);
+  }
+  OnReceivedPayloadData(NULL, 0, &rtp_header);
+}
+
+// RTCP ingest path: fans the packet out to the simulcast modules (under
+// lock) and the base module, then, once a valid RTT and remote NTP mapping
+// are available, updates the NTP time estimator used to timestamp incoming
+// frames. Returns 0 on success or while waiting for RTT/RTCP data; returns
+// the base module's error code otherwise.
+int ViEReceiver::InsertRTCPPacket(const uint8_t* rtcp_packet,
+                                  size_t rtcp_packet_length) {
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+    if (!receiving_) {
+      return -1;
+    }
+
+    for (RtpRtcp* rtp_rtcp : rtp_rtcp_simulcast_)
+      rtp_rtcp->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+  }
+  assert(rtp_rtcp_);  // Should be set by owner at construction time.
+  int ret = rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+  if (ret != 0) {
+    return ret;
+  }
+
+  int64_t rtt = 0;
+  rtp_rtcp_->RTT(rtp_receiver_->SSRC(), &rtt, NULL, NULL, NULL);
+  if (rtt == 0) {
+    // Waiting for valid rtt.
+    return 0;
+  }
+  uint32_t ntp_secs = 0;
+  uint32_t ntp_frac = 0;
+  uint32_t rtp_timestamp = 0;
+  if (0 != rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
+                                &rtp_timestamp)) {
+    // Waiting for RTCP.
+    return 0;
+  }
+  ntp_estimator_->UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
+
+  return 0;
+}
+
+// Enables packet ingestion; Insert{RTP,RTCP}Packet drop input while stopped.
+void ViEReceiver::StartReceive() {
+  CriticalSectionScoped cs(receive_cs_.get());
+  receiving_ = true;
+}
+
+// Disables packet ingestion.
+void ViEReceiver::StopReceive() {
+  CriticalSectionScoped cs(receive_cs_.get());
+  receiving_ = false;
+}
+
+ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
+  return rtp_receive_statistics_.get();
+}
+
+// Returns true if |header|'s sequence number is in order for its SSRC's
+// stream. Returns false when no statistician exists yet for the SSRC.
+bool ViEReceiver::IsPacketInOrder(const RTPHeader& header) const {
+  StreamStatistician* statistician =
+      rtp_receive_statistics_->GetStatistician(header.ssrc);
+  if (!statistician)
+    return false;
+  return statistician->IsPacketInOrder(header.sequenceNumber);
+}
+
+// Heuristically classifies an out-of-order packet as a retransmission using
+// the stream's min RTT. Always false when RTX is enabled, since RTX
+// retransmissions arrive on a separate stream and are handled separately.
+bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header,
+                                        bool in_order) const {
+  // Retransmissions are handled separately if RTX is enabled.
+  if (rtp_payload_registry_->RtxEnabled())
+    return false;
+  StreamStatistician* statistician =
+      rtp_receive_statistics_->GetStatistician(header.ssrc);
+  if (!statistician)
+    return false;
+  // Check if this is a retransmission.
+  int64_t min_rtt = 0;
+  rtp_rtcp_->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
+  return !in_order &&
+      statistician->IsRetransmitOfOldPacket(header, min_rtt);
+}
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_receiver.h b/webrtc/video_engine/vie_receiver.h
new file mode 100644
index 0000000000..cd069eaa5b
--- /dev/null
+++ b/webrtc/video_engine/vie_receiver.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
+
+#include <list>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_engine/vie_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class FecReceiver;
+class RemoteNtpTimeEstimator;
+class ReceiveStatistics;
+class RemoteBitrateEstimator;
+class RtpHeaderParser;
+class RTPPayloadRegistry;
+class RtpReceiver;
+class RtpRtcp;
+class VideoCodingModule;
+struct ReceiveBandwidthEstimatorStats;
+
+// Receive-side RTP/RTCP front end for a video channel: parses incoming
+// packets, demultiplexes media/RED/RTX, feeds the remote bitrate estimator
+// and receive statistics, and delivers depacketized payloads to the VCM via
+// the RtpData callback interface.
+class ViEReceiver : public RtpData {
+ public:
+  ViEReceiver(VideoCodingModule* module_vcm,
+              RemoteBitrateEstimator* remote_bitrate_estimator,
+              RtpFeedback* rtp_feedback);
+  ~ViEReceiver();
+
+  bool SetReceiveCodec(const VideoCodec& video_codec);
+  bool RegisterPayload(const VideoCodec& video_codec);
+
+  void SetNackStatus(bool enable, int max_nack_reordering_threshold);
+  void SetRtxPayloadType(int payload_type, int associated_payload_type);
+  // If set to true, the RTX payload type mapping supplied in
+  // |SetRtxPayloadType| will be used when restoring RTX packets. Without it,
+  // RTX packets will always be restored to the last non-RTX packet payload type
+  // received.
+  void SetUseRtxPayloadMappingOnRestore(bool val);
+  void SetRtxSsrc(uint32_t ssrc);
+  bool GetRtxSsrc(uint32_t* ssrc) const;
+
+  bool IsFecEnabled() const;
+
+  uint32_t GetRemoteSsrc() const;
+  int GetCsrcs(uint32_t* csrcs) const;
+
+  // Module is borrowed, not owned; must outlive this object.
+  void SetRtpRtcpModule(RtpRtcp* module);
+
+  RtpReceiver* GetRtpReceiver() const;
+
+  void RegisterRtpRtcpModules(const std::vector<RtpRtcp*>& rtp_modules);
+
+  bool SetReceiveTimestampOffsetStatus(bool enable, int id);
+  bool SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
+  bool SetReceiveVideoRotationStatus(bool enable, int id);
+  bool SetReceiveTransportSequenceNumber(bool enable, int id);
+
+  void StartReceive();
+  void StopReceive();
+
+  // Receives packets from external transport.
+  int ReceivedRTPPacket(const void* rtp_packet, size_t rtp_packet_length,
+                        const PacketTime& packet_time);
+  int ReceivedRTCPPacket(const void* rtcp_packet, size_t rtcp_packet_length);
+
+  // Implements RtpData.
+  int32_t OnReceivedPayloadData(const uint8_t* payload_data,
+                                const size_t payload_size,
+                                const WebRtcRTPHeader* rtp_header) override;
+  bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
+
+  ReceiveStatistics* GetReceiveStatistics() const;
+ private:
+  int InsertRTPPacket(const uint8_t* rtp_packet, size_t rtp_packet_length,
+                      const PacketTime& packet_time);
+  bool ReceivePacket(const uint8_t* packet,
+                     size_t packet_length,
+                     const RTPHeader& header,
+                     bool in_order);
+  // Parses and handles for instance RTX and RED headers.
+  // This function assumes that it's being called from only one thread.
+  bool ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
+                                         size_t packet_length,
+                                         const RTPHeader& header);
+  void NotifyReceiverOfFecPacket(const RTPHeader& header);
+  int InsertRTCPPacket(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
+  bool IsPacketInOrder(const RTPHeader& header) const;
+  bool IsPacketRetransmitted(const RTPHeader& header, bool in_order) const;
+  void UpdateHistograms();
+
+  // Guards receive state shared with the packet-insertion path.
+  rtc::scoped_ptr<CriticalSectionWrapper> receive_cs_;
+  Clock* clock_;
+  rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+  rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+  rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+  const rtc::scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
+  rtc::scoped_ptr<FecReceiver> fec_receiver_;
+  // Base RTP/RTCP module and per-simulcast-stream modules; not owned.
+  RtpRtcp* rtp_rtcp_;
+  std::vector<RtpRtcp*> rtp_rtcp_simulcast_;
+  VideoCodingModule* vcm_;
+  RemoteBitrateEstimator* remote_bitrate_estimator_;
+
+  rtc::scoped_ptr<RemoteNtpTimeEstimator> ntp_estimator_;
+
+  bool receiving_;
+  // Scratch buffer for de-RTX'ed packets; see restored_packet_in_use_.
+  uint8_t restored_packet_[kViEMaxMtu];
+  bool restored_packet_in_use_;
+  bool receiving_ast_enabled_;
+  bool receiving_cvo_enabled_;
+  bool receiving_tsn_enabled_;
+  // Timestamp of the last periodic packet-header log (-1 = never logged).
+  int64_t last_packet_log_ms_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
diff --git a/webrtc/video_engine/vie_remb.cc b/webrtc/video_engine/vie_remb.cc
new file mode 100644
index 0000000000..b347f2ee00
--- /dev/null
+++ b/webrtc/video_engine/vie_remb.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/vie_remb.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+// Minimum interval between periodic REMB packets.
+const int kRembSendIntervalMs = 200;
+
+// % threshold for if we should send a new REMB asap.
+const unsigned int kSendThresholdPercent = 97;
+
+// Starts with no registered modules and no bitrate estimate; the REMB timer
+// is initialized to "now" so the first estimate does not fire immediately.
+VieRemb::VieRemb()
+    : list_crit_(CriticalSectionWrapper::CreateCriticalSection()),
+      last_remb_time_(TickTime::MillisecondTimestamp()),
+      last_send_bitrate_(0),
+      bitrate_(0) {}
+
+VieRemb::~VieRemb() {}
+
+// Adds |rtp_rtcp| (borrowed, non-null) to the set of receive modules whose
+// estimates are combined into the REMB. Duplicate additions are ignored.
+void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
+      receive_modules_.end())
+    return;
+
+  // The module probably doesn't have a remote SSRC yet, so don't add it to the
+  // map.
+  receive_modules_.push_back(rtp_rtcp);
+}
+
+// Removes |rtp_rtcp| from the receive set; no-op if it was never added.
+void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  for (RtpModules::iterator it = receive_modules_.begin();
+       it != receive_modules_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      receive_modules_.erase(it);
+      break;
+    }
+  }
+}
+
+// Adds |rtp_rtcp| (borrowed, non-null) to the set of modules that may send
+// the REMB RTCP packet. Duplicate additions are ignored.
+void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+
+  // Verify this module hasn't been added earlier.
+  if (std::find(rtcp_sender_.begin(), rtcp_sender_.end(), rtp_rtcp) !=
+      rtcp_sender_.end())
+    return;
+  rtcp_sender_.push_back(rtp_rtcp);
+}
+
+// Removes |rtp_rtcp| from the sender set; no-op if it was never added.
+void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  for (RtpModules::iterator it = rtcp_sender_.begin();
+       it != rtcp_sender_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      rtcp_sender_.erase(it);
+      return;
+    }
+  }
+}
+
+// Returns true while any receive module or REMB sender is still registered.
+bool VieRemb::InUse() const {
+  CriticalSectionScoped cs(list_crit_.get());
+  if (receive_modules_.empty() && rtcp_sender_.empty())
+    return false;
+  else
+    return true;
+}
+
+// RemoteBitrateObserver callback. Sends a REMB through the first registered
+// sender (falling back to the first receive module) when either
+// kRembSendIntervalMs has elapsed or the estimate dropped below
+// kSendThresholdPercent of the last sent value. Uses manual Enter()/Leave()
+// so the lock can be released before the outgoing SetREMBData call.
+void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+                                      unsigned int bitrate) {
+  list_crit_->Enter();
+  // If we already have an estimate, check if the new total estimate is below
+  // kSendThresholdPercent of the previous estimate.
+  if (last_send_bitrate_ > 0) {
+    unsigned int new_remb_bitrate = last_send_bitrate_ - bitrate_ + bitrate;
+
+    if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
+      // The new bitrate estimate is less than kSendThresholdPercent % of the
+      // last report. Send a REMB asap.
+      last_remb_time_ = TickTime::MillisecondTimestamp() - kRembSendIntervalMs;
+    }
+  }
+  bitrate_ = bitrate;
+
+  // Calculate total receive bitrate estimate.
+  int64_t now = TickTime::MillisecondTimestamp();
+
+  if (now - last_remb_time_ < kRembSendIntervalMs) {
+    list_crit_->Leave();
+    return;
+  }
+  last_remb_time_ = now;
+
+  if (ssrcs.empty() || receive_modules_.empty()) {
+    list_crit_->Leave();
+    return;
+  }
+
+  // Send a REMB packet.
+  RtpRtcp* sender = NULL;
+  if (!rtcp_sender_.empty()) {
+    sender = rtcp_sender_.front();
+  } else {
+    sender = receive_modules_.front();
+  }
+  last_send_bitrate_ = bitrate_;
+
+  // Release the lock before calling into the RTP module to avoid holding it
+  // across an external call.
+  list_crit_->Leave();
+
+  if (sender) {
+    sender->SetREMBData(bitrate_, ssrcs);
+  }
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_remb.h b/webrtc/video_engine/vie_remb.h
new file mode 100644
index 0000000000..9f38259ca8
--- /dev/null
+++ b/webrtc/video_engine/vie_remb.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_REMB_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_REMB_H_
+
+#include <list>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class RtpRtcp;
+
+// Aggregates receive-side bandwidth estimates and emits RTCP REMB packets
+// through one of the registered RTP/RTCP modules. Thread-safe: all state is
+// guarded by |list_crit_|.
+class VieRemb : public RemoteBitrateObserver {
+ public:
+  VieRemb();
+  ~VieRemb();
+
+  // Called to add a receive channel to include in the REMB packet.
+  void AddReceiveChannel(RtpRtcp* rtp_rtcp);
+
+  // Removes the specified channel from REMB estimate.
+  void RemoveReceiveChannel(RtpRtcp* rtp_rtcp);
+
+  // Called to add a module that can generate and send REMB RTCP.
+  void AddRembSender(RtpRtcp* rtp_rtcp);
+
+  // Removes a REMB RTCP sender.
+  void RemoveRembSender(RtpRtcp* rtp_rtcp);
+
+  // Returns true if the instance is in use, false otherwise.
+  bool InUse() const;
+
+  // Called every time there is a new bitrate estimate for a receive channel
+  // group. This call will trigger a new RTCP REMB packet if the bitrate
+  // estimate has decreased or if no RTCP REMB packet has been sent for
+  // a certain time interval.
+  // Implements RtpReceiveBitrateUpdate.
+  virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+                                       unsigned int bitrate);
+
+ private:
+  typedef std::list<RtpRtcp*> RtpModules;
+
+  // Guards all members below. Modules in the lists are borrowed, not owned.
+  rtc::scoped_ptr<CriticalSectionWrapper> list_crit_;
+
+  // The last time a REMB was sent.
+  int64_t last_remb_time_;
+  unsigned int last_send_bitrate_;
+
+  // All RtpRtcp modules to include in the REMB packet.
+  RtpModules receive_modules_;
+
+  // All modules that can send REMB RTCP.
+  RtpModules rtcp_sender_;
+
+  // The last bitrate update.
+  unsigned int bitrate_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIE_REMB_H_
diff --git a/webrtc/video_engine/vie_remb_unittest.cc b/webrtc/video_engine/vie_remb_unittest.cc
new file mode 100644
index 0000000000..3289c4b822
--- /dev/null
+++ b/webrtc/video_engine/vie_remb_unittest.cc
@@ -0,0 +1,251 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for ViERemb.
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video_engine/vie_remb.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+namespace webrtc {
+
+// Test fixture: runs VieRemb against a fake clock so REMB send intervals can
+// be advanced deterministically.
+class ViERembTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    TickTime::UseFakeClock(12345);
+    process_thread_.reset(new NiceMock<MockProcessThread>);
+    vie_remb_.reset(new VieRemb());
+  }
+  rtc::scoped_ptr<MockProcessThread> process_thread_;
+  rtc::scoped_ptr<VieRemb> vie_remb_;
+};
+
+// A single module acting as both receive channel and REMB sender gets the
+// REMB after the send interval, and again immediately when the estimate
+// drops.
+TEST_F(ViERembTest, OneModuleTestForSendingRemb) {
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+
+  const unsigned int bitrate_estimate = 456;
+  unsigned int ssrc = 1234;
+  std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  TickTime::AdvanceFakeClock(1000);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower bitrate to send another REMB packet.
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate - 100, ssrcs))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate - 100);
+
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+// A >3% drop in the estimate triggers an immediate REMB, bypassing the
+// normal send interval.
+TEST_F(ViERembTest, LowerEstimateToSendRemb) {
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc = 1234;
+  std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+  // Call OnReceiveBitrateChanged twice to get a first estimate.
+  TickTime::AdvanceFakeClock(1000);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower the estimate with more than 3% to trigger a call to SetREMBData right
+  // away.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+}
+
+// With two receive modules but one REMB sender, only the sender gets
+// SetREMBData; both rising and falling estimates are exercised.
+TEST_F(ViERembTest, VerifyIncreasingAndDecreasing) {
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+  std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
+
+  // Call OnReceiveBitrateChanged twice to get a first estimate.
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0], ssrcs))
+      .Times(1);
+  TickTime::AdvanceFakeClock(1000);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
+
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1] + 100);
+
+  // Lower the estimate to trigger a callback.
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[1], ssrcs))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1]);
+
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+// Increases, and decreases smaller than the 3% threshold, must not trigger
+// an extra REMB inside the send interval.
+TEST_F(ViERembTest, NoRembForIncreasedBitrate) {
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234, 5678 };
+  std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+  // Call OnReceiveBitrateChanged twice to get a first estimate.
+  TickTime::AdvanceFakeClock(1000);
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Increased estimate shouldn't trigger a callback right away.
+  EXPECT_CALL(rtp_0, SetREMBData(_, _))
+      .Times(0);
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate + 1);
+
+  // Decreasing the estimate less than 3% shouldn't trigger a new callback.
+  EXPECT_CALL(rtp_0, SetREMBData(_, _))
+      .Times(0);
+  int lower_estimate = bitrate_estimate * 98 / 100;
+  vie_remb_->OnReceiveBitrateChanged(ssrcs, lower_estimate);
+
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+}
+
+// Verifies that the REMB-sender role can be moved between RTP modules at
+// runtime: after RemoveRembSender(rtp_0)/AddRembSender(rtp_1), subsequent
+// REMB packets go out through rtp_1 instead of rtp_0.
+TEST_F(ViERembTest, ChangeSendRtpModule) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Decrease estimate to trigger a REMB.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Remove the sending module, add it again -> should get remb on the second
+ // module.
+ vie_remb_->RemoveRembSender(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_1);
+ // Unchanged estimate after the swap; no SetREMBData expectation is set.
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // A further decrease must now be sent through rtp_1, the new REMB sender.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp_1, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Unregister everything that was added so the fixture tears down cleanly.
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+// Verifies that reporting the same (already-sent) estimate a second time does
+// not produce a duplicate REMB packet.
+TEST_F(ViERembTest, OnlyOneRembForDoubleProcess) {
+ MockRtpRtcp rtp;
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ TickTime::AdvanceFakeClock(1000);
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Lower the estimate, should trigger a call to SetREMBData right away.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Call OnReceiveBitrateChanged again, this should not trigger a new callback.
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(0);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Unregister everything that was added so the fixture tears down cleanly.
+ vie_remb_->RemoveReceiveChannel(&rtp);
+ vie_remb_->RemoveRembSender(&rtp);
+}
+
+// Only register receiving modules and make sure we fallback to trigger a REMB
+// packet on this one.
+TEST_F(ViERembTest, NoSendingRtpModule) {
+ MockRtpRtcp rtp;
+ // Note: only AddReceiveChannel() here -- no AddRembSender() on purpose.
+ vie_remb_->AddReceiveChannel(&rtp);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ TickTime::AdvanceFakeClock(1000);
+ // The receive channel is expected to act as the fallback REMB sender.
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Lower the estimate to trigger a new REMB packet.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_sync_module.cc b/webrtc/video_engine/vie_sync_module.cc
new file mode 100644
index 0000000000..1c5d877cd2
--- /dev/null
+++ b/webrtc/video_engine/vie_sync_module.cc
@@ -0,0 +1,188 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video_engine/vie_sync_module.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/video_engine/stream_synchronization.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+// Refreshes |stream| with the latest sync data for one media stream: the most
+// recently received RTP timestamp and its local receive time (from |receiver|)
+// plus the remote NTP time / RTP timestamp pair from the last RTCP sender
+// report (from |rtp_rtcp|), appended to |stream->rtcp| via UpdateRtcpList().
+// Returns 0 on success, -1 if any of the values is not yet available.
+// NOTE(review): file-local helper with external linkage; consider marking it
+// static or moving it into an anonymous namespace.
+int UpdateMeasurements(StreamSynchronization::Measurements* stream,
+ const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
+ if (!receiver.Timestamp(&stream->latest_timestamp))
+ return -1;
+ if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms))
+ return -1;
+
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ // RemoteNTP() returning non-zero means no usable sender report yet.
+ if (0 != rtp_rtcp.RemoteNTP(&ntp_secs,
+ &ntp_frac,
+ NULL,
+ NULL,
+ &rtp_timestamp)) {
+ return -1;
+ }
+
+ bool new_rtcp_sr = false;
+ if (!UpdateRtcpList(
+ ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp, &new_rtcp_sr)) {
+ return -1;
+ }
+
+ return 0;
+}
+
+// Constructs the module in an unconfigured state: no voice channel (-1), no
+// RTP/RTCP or receiver pointers, and no StreamSynchronization instance until
+// ConfigureSync() is called.
+ViESyncModule::ViESyncModule(VideoCodingModule* vcm)
+ : data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ vcm_(vcm),
+ video_receiver_(NULL),
+ video_rtp_rtcp_(NULL),
+ voe_channel_id_(-1),
+ voe_sync_interface_(NULL),
+ last_sync_time_(TickTime::Now()),
+ sync_() {
+}
+
+// All members are owned via scoped_ptr or are non-owning raw pointers, so
+// there is no explicit cleanup to do.
+ViESyncModule::~ViESyncModule() {
+}
+
+// Binds this module to an audio/video stream pair to synchronize:
+// |voe_channel_id| / |voe_sync_interface| identify the voice side,
+// |video_rtcp_module| / |video_receiver| the video side. Rebuilds the
+// StreamSynchronization state for the new pair. Returns 0 on success, -1 if a
+// voice channel (>= 0) was requested without a VoEVideoSync interface.
+int ViESyncModule::ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface,
+ RtpRtcp* video_rtcp_module,
+ RtpReceiver* video_receiver) {
+ CriticalSectionScoped cs(data_cs_.get());
+ // Prevent expensive no-ops.
+ if (voe_channel_id_ == voe_channel_id &&
+ voe_sync_interface_ == voe_sync_interface &&
+ video_receiver_ == video_receiver &&
+ video_rtp_rtcp_ == video_rtcp_module) {
+ return 0;
+ }
+ voe_channel_id_ = voe_channel_id;
+ voe_sync_interface_ = voe_sync_interface;
+ video_receiver_ = video_receiver;
+ video_rtp_rtcp_ = video_rtcp_module;
+ // Reset sync state keyed on the video SSRC and the new voice channel.
+ sync_.reset(
+ new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id));
+
+ if (!voe_sync_interface) {
+ // No voice interface: clear the channel id so Process() becomes a no-op.
+ voe_channel_id_ = -1;
+ if (voe_channel_id >= 0) {
+ // Trying to set a voice channel but no interface exist.
+ return -1;
+ }
+ return 0;
+ }
+ return 0;
+}
+
+// Returns the currently configured voice channel id, or -1 if none.
+int ViESyncModule::VoiceChannel() {
+ return voe_channel_id_;
+}
+
+// Module interface: milliseconds until Process() should run again, based on a
+// fixed 1-second sync interval since the last run.
+int64_t ViESyncModule::TimeUntilNextProcess() {
+ const int64_t kSyncIntervalMs = 1000;
+ return kSyncIntervalMs - (TickTime::Now() - last_sync_time_).Milliseconds();
+}
+
+// Module interface: runs one audio/video sync iteration. Measures the current
+// audio and video delays, computes the relative offset between the streams,
+// and pushes new minimum playout delays to VoE (audio) and the VCM (video).
+// Every failure path returns 0 ("try again next interval") -- sync is
+// best-effort and must not propagate errors to the module scheduler.
+int32_t ViESyncModule::Process() {
+ CriticalSectionScoped cs(data_cs_.get());
+ last_sync_time_ = TickTime::Now();
+
+ const int current_video_delay_ms = vcm_->Delay();
+
+ // Nothing to sync against until ConfigureSync() has set a voice channel.
+ if (voe_channel_id_ == -1) {
+ return 0;
+ }
+ assert(video_rtp_rtcp_ && voe_sync_interface_);
+ assert(sync_.get());
+
+ // Total audio delay = jitter buffer + playout buffer.
+ int audio_jitter_buffer_delay_ms = 0;
+ int playout_buffer_delay_ms = 0;
+ if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
+ &audio_jitter_buffer_delay_ms,
+ &playout_buffer_delay_ms) != 0) {
+ return 0;
+ }
+ const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
+ playout_buffer_delay_ms;
+
+ // Fetch the voice channel's RTP/RTCP module and receiver so we can read its
+ // timing measurements the same way as for video.
+ RtpRtcp* voice_rtp_rtcp = NULL;
+ RtpReceiver* voice_receiver = NULL;
+ if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
+ &voice_receiver)) {
+ return 0;
+ }
+ assert(voice_rtp_rtcp);
+ assert(voice_receiver);
+
+ // Refresh the latest RTP/RTCP timing measurements for both streams; bail
+ // out until both have valid data (e.g. an RTCP SR has been received).
+ if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
+ *video_receiver_) != 0) {
+ return 0;
+ }
+
+ if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
+ *voice_receiver) != 0) {
+ return 0;
+ }
+
+ int relative_delay_ms;
+ // Calculate how much later or earlier the audio stream is compared to video.
+ if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_,
+ &relative_delay_ms)) {
+ return 0;
+ }
+
+ TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", current_video_delay_ms);
+ TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms);
+ TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
+ int target_audio_delay_ms = 0;
+ int target_video_delay_ms = current_video_delay_ms;
+ // Calculate the necessary extra audio delay and desired total video
+ // delay to get the streams in sync.
+ if (!sync_->ComputeDelays(relative_delay_ms,
+ current_audio_delay_ms,
+ &target_audio_delay_ms,
+ &target_video_delay_ms)) {
+ return 0;
+ }
+
+ // Apply the targets; a failed voice-delay update is logged but the video
+ // delay is still applied.
+ if (voe_sync_interface_->SetMinimumPlayoutDelay(
+ voe_channel_id_, target_audio_delay_ms) == -1) {
+ LOG(LS_ERROR) << "Error setting voice delay.";
+ }
+ vcm_->SetMinimumPlayoutDelay(target_video_delay_ms);
+ return 0;
+}
+
+// Sets a target buffering delay for both streams (0 = real-time mode):
+// forwarded to StreamSynchronization for video and as an initial playout
+// delay to the voice engine. Returns 0 on success, -1 if no voice interface
+// has been configured. Note: a non-null voe_sync_interface_ implies
+// ConfigureSync() has run, which also guarantees sync_ is non-null here.
+int ViESyncModule::SetTargetBufferingDelay(int target_delay_ms) {
+ CriticalSectionScoped cs(data_cs_.get());
+ if (!voe_sync_interface_) {
+ LOG(LS_ERROR) << "voe_sync_interface_ NULL, can't set playout delay.";
+ return -1;
+ }
+ sync_->SetTargetBufferingDelay(target_delay_ms);
+ // Setting initial playout delay to voice engine (video engine is updated via
+ // the VCM interface).
+ voe_sync_interface_->SetInitialPlayoutDelay(voe_channel_id_,
+ target_delay_ms);
+ return 0;
+}
+
+} // namespace webrtc
diff --git a/webrtc/video_engine/vie_sync_module.h b/webrtc/video_engine/vie_sync_module.h
new file mode 100644
index 0000000000..ea2ae0bd51
--- /dev/null
+++ b/webrtc/video_engine/vie_sync_module.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESyncModule is responsible for synchronization audio and video for a given
+// VoE and ViE channel couple.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/interface/module.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video_engine/stream_synchronization.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpRtcp;
+class VideoCodingModule;
+class ViEChannel;
+class VoEVideoSync;
+
+// Periodic module that keeps one VoE (audio) channel and one ViE (video)
+// channel in sync by adjusting their minimum playout delays (see .cc file).
+class ViESyncModule : public Module {
+ public:
+ explicit ViESyncModule(VideoCodingModule* vcm);
+ ~ViESyncModule();
+
+ // Binds the audio side (|voe_channel_id| + |voe_sync_interface|) and video
+ // side (|video_rtcp_module| + |video_receiver|) to synchronize. Returns 0
+ // on success, -1 on an invalid combination.
+ int ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface,
+ RtpRtcp* video_rtcp_module,
+ RtpReceiver* video_receiver);
+
+ // Returns the configured voice channel id, or -1 if none.
+ int VoiceChannel();
+
+ // Set target delay for buffering mode (0 = real-time mode).
+ int SetTargetBufferingDelay(int target_delay_ms);
+
+ // Implements Module.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ private:
+ // Guards all state below; Process() and the setters run on different
+ // threads.
+ rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
+ VideoCodingModule* const vcm_; // Not owned; provides/accepts video delay.
+ RtpReceiver* video_receiver_; // Not owned; set by ConfigureSync().
+ RtpRtcp* video_rtp_rtcp_; // Not owned; set by ConfigureSync().
+ int voe_channel_id_; // -1 when no voice channel is configured.
+ VoEVideoSync* voe_sync_interface_; // Not owned; NULL until configured.
+ TickTime last_sync_time_; // Timestamp of the last Process() run.
+ rtc::scoped_ptr<StreamSynchronization> sync_; // Rebuilt by ConfigureSync().
+ StreamSynchronization::Measurements audio_measurement_;
+ StreamSynchronization::Measurements video_measurement_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_