summaryrefslogtreecommitdiff
path: root/video
diff options
context:
space:
mode:
authorpbos@webrtc.org <pbos@webrtc.org@4adac7df-926f-26a2-2b94-8c16560cd09d>2013-10-28 16:32:01 +0000
committerpbos@webrtc.org <pbos@webrtc.org@4adac7df-926f-26a2-2b94-8c16560cd09d>2013-10-28 16:32:01 +0000
commit24e2089750e9e51228b82d6c7ebf4fa064c797ba (patch)
treef964abcaca5d5609cbf1f7bcbbcbc1d8394c35c5 /video
parent4ce759057ec1d71b19bd706c5cdc9027096463fd (diff)
downloadwebrtc-24e2089750e9e51228b82d6c7ebf4fa064c797ba.tar.gz
Separate Call API/build files from video_engine/.
BUG=2535 R=andrew@webrtc.org, mflodman@webrtc.org, niklas.enbom@webrtc.org Review URL: https://webrtc-codereview.appspot.com/2659004 git-svn-id: http://webrtc.googlecode.com/svn/trunk/webrtc@5042 4adac7df-926f-26a2-2b94-8c16560cd09d
Diffstat (limited to 'video')
-rw-r--r--video/OWNERS4
-rw-r--r--video/full_stack.cc448
-rw-r--r--video/rampup_tests.cc192
-rw-r--r--video/transport_adapter.cc36
-rw-r--r--video/transport_adapter.h34
-rw-r--r--video/video_receive_stream.cc192
-rw-r--r--video/video_receive_stream.h81
-rw-r--r--video/video_send_stream.cc295
-rw-r--r--video/video_send_stream.h85
-rw-r--r--video/video_send_stream_tests.cc613
-rw-r--r--video/webrtc_video.gypi22
11 files changed, 2002 insertions, 0 deletions
diff --git a/video/OWNERS b/video/OWNERS
new file mode 100644
index 00000000..50640749
--- /dev/null
+++ b/video/OWNERS
@@ -0,0 +1,4 @@
+mflodman@webrtc.org
+stefan@webrtc.org
+wu@webrtc.org
+mallinath@webrtc.org
diff --git a/video/full_stack.cc b/video/full_stack.cc
new file mode 100644
index 00000000..b154df30
--- /dev/null
+++ b/video/full_stack.cc
@@ -0,0 +1,448 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <stdio.h>
+
+#include <deque>
+#include <map>
+
+#include "gflags/gflags.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/generate_ssrcs.h"
+#include "webrtc/test/statistics.h"
+#include "webrtc/test/video_renderer.h"
+#include "webrtc/typedefs.h"
+
+DEFINE_int32(seconds, 10, "Seconds to run each clip.");
+
+namespace webrtc {
+
+// Parameters for one full-stack run: which YUV clip to stream, the target
+// bitrate, and the minimum average PSNR/SSIM the measured quality must exceed.
+struct FullStackTestParams {
+ const char* test_label;
+ struct {
+ const char* name;
+ size_t width, height;
+ int fps;
+ } clip;
+ unsigned int bitrate;
+ double avg_psnr_threshold;
+ double avg_ssim_threshold;
+};
+
+// QCIF clip with real quality thresholds (36 dB PSNR, 0.96 SSIM).
+FullStackTestParams paris_qcif = {
+ "net_delay_0_0_plr_0", {"paris_qcif", 176, 144, 30}, 300, 36.0, 0.96};
+
+// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
+// Thresholds of 0.0 mean the quality gates are effectively disabled here.
+FullStackTestParams foreman_cif = {
+ "foreman_cif_net_delay_0_0_plr_0",
+ {"foreman_cif", 352, 288, 30},
+ 700,
+ 0.0,
+ 0.0};
+
+// Value-parameterized fixture; each FullStackTestParams instance becomes one
+// test case (see INSTANTIATE_TEST_CASE_P at the bottom of the file).
+class FullStackTest : public ::testing::TestWithParam<FullStackTestParams> {
+ protected:
+ // Tracks SSRCs already handed out so generated SSRCs stay unique per test.
+ std::map<uint32_t, bool> reserved_ssrcs_;
+};
+
+// Hooks into every stage of a call: it observes captured frames
+// (VideoSendStreamInput), outgoing packets (newapi::Transport), incoming
+// packets (PacketReceiver) and rendered frames (VideoRenderer). Rendered
+// frames are matched to their captured references via RTP timestamps and a
+// worker thread computes PSNR/SSIM and delay statistics per frame pair.
+class VideoAnalyzer : public PacketReceiver,
+ public newapi::Transport,
+ public VideoRenderer,
+ public VideoSendStreamInput {
+ public:
+ VideoAnalyzer(VideoSendStreamInput* input,
+ Transport* transport,
+ const char* test_label,
+ double avg_psnr_threshold,
+ double avg_ssim_threshold,
+ int duration_frames)
+ : input_(input),
+ transport_(transport),
+ receiver_(NULL),
+ test_label_(test_label),
+ dropped_frames_(0),
+ rtp_timestamp_delta_(0),
+ first_send_frame_(NULL),
+ last_render_time_(0),
+ avg_psnr_threshold_(avg_psnr_threshold),
+ avg_ssim_threshold_(avg_ssim_threshold),
+ frames_left_(duration_frames),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ comparison_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ comparison_thread_(ThreadWrapper::CreateThread(&FrameComparisonThread,
+ this)),
+ trigger_(EventWrapper::Create()) {
+ // The comparison thread runs for the whole lifetime of the analyzer.
+ unsigned int id;
+ EXPECT_TRUE(comparison_thread_->Start(id));
+ }
+
+ ~VideoAnalyzer() {
+ EXPECT_TRUE(comparison_thread_->Stop());
+
+ // Free any reference frames still queued plus the reuse pool.
+ while (!frames_.empty()) {
+ delete frames_.back();
+ frames_.pop_back();
+ }
+ while (!frame_pool_.empty()) {
+ delete frame_pool_.back();
+ frame_pool_.pop_back();
+ }
+ }
+
+ virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
+
+ // PacketReceiver: records the NTP receive time of each packet, keyed by the
+ // capture-side timestamp (send timestamp minus the RTP timestamp delta),
+ // then forwards the packet to the real receiver.
+ virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE {
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RTPHeader header;
+ parser->Parse(packet, static_cast<int>(length), &header);
+ {
+ CriticalSectionScoped cs(crit_.get());
+ recv_times_[header.timestamp - rtp_timestamp_delta_] =
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
+ }
+
+ return receiver_->DeliverPacket(packet, length);
+ }
+
+ // VideoSendStreamInput: keeps a deep copy of every captured frame as the
+ // comparison reference, then forwards the frame into the send stream.
+ virtual void PutFrame(const I420VideoFrame& video_frame,
+ uint32_t delta_capture_ms) OVERRIDE {
+ // Reuse a frame from the pool when possible to avoid reallocations.
+ I420VideoFrame* copy = NULL;
+ {
+ CriticalSectionScoped cs(crit_.get());
+ if (frame_pool_.size() > 0) {
+ copy = frame_pool_.front();
+ frame_pool_.pop_front();
+ }
+ }
+ if (copy == NULL)
+ copy = new I420VideoFrame();
+
+ copy->CopyFrame(video_frame);
+ // Synthesize the 90 kHz RTP timestamp from the render time.
+ copy->set_timestamp(copy->render_time_ms() * 90);
+
+ {
+ CriticalSectionScoped cs(crit_.get());
+ // Remember the first frame so SendRTP can derive the timestamp delta.
+ if (first_send_frame_ == NULL && rtp_timestamp_delta_ == 0)
+ first_send_frame_ = copy;
+
+ frames_.push_back(copy);
+ }
+
+ input_->PutFrame(video_frame, delta_capture_ms);
+ }
+
+ // newapi::Transport: derives the RTP-timestamp offset from the first sent
+ // packet and records per-frame send times, then forwards to the transport.
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RTPHeader header;
+ parser->Parse(packet, static_cast<int>(length), &header);
+
+ {
+ CriticalSectionScoped cs(crit_.get());
+ if (rtp_timestamp_delta_ == 0) {
+ rtp_timestamp_delta_ =
+ header.timestamp - first_send_frame_->timestamp();
+ first_send_frame_ = NULL;
+ }
+ send_times_[header.timestamp - rtp_timestamp_delta_] =
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
+ }
+
+ return transport_->SendRTP(packet, length);
+ }
+
+ virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
+ return transport_->SendRTCP(packet, length);
+ }
+
+ // VideoRenderer: pairs the rendered frame with its reference. References
+ // older than the rendered frame were never rendered and are counted as
+ // dropped (compared against the last rendered frame instead).
+ virtual void RenderFrame(const I420VideoFrame& video_frame,
+ int time_to_render_ms) OVERRIDE {
+ int64_t render_time_ms =
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
+ // NOTE(review): rtp_timestamp_delta_ is read here without holding crit_
+ // while SendRTP writes it under the lock — confirm this is benign.
+ uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_;
+
+ {
+ CriticalSectionScoped cs(crit_.get());
+ while (frames_.front()->timestamp() < send_timestamp) {
+ AddFrameComparison(
+ frames_.front(), &last_rendered_frame_, true, render_time_ms);
+ frame_pool_.push_back(frames_.front());
+ frames_.pop_front();
+ }
+
+ I420VideoFrame* reference_frame = frames_.front();
+ frames_.pop_front();
+ assert(reference_frame != NULL);
+ EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
+ assert(reference_frame->timestamp() == send_timestamp);
+
+ AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
+ frame_pool_.push_back(reference_frame);
+ }
+
+ last_rendered_frame_.CopyFrame(video_frame);
+ }
+
+ // Blocks (up to 120 s) until the comparison thread has processed the
+ // configured number of frames and signalled completion.
+ void Wait() { trigger_->Wait(120 * 1000); }
+
+ // Public so the test body can repoint input_ at the real send stream after
+ // stream creation (the analyzer is constructed before the stream exists).
+ VideoSendStreamInput* input_;
+ Transport* transport_;
+ PacketReceiver* receiver_;
+
+ private:
+ // One reference/render frame pair plus its timing; frames are deep-copied.
+ struct FrameComparison {
+ FrameComparison(const I420VideoFrame* reference,
+ const I420VideoFrame* render,
+ bool dropped,
+ int64_t send_time_ms,
+ int64_t recv_time_ms,
+ int64_t render_time_ms)
+ : dropped(dropped),
+ send_time_ms(send_time_ms),
+ recv_time_ms(recv_time_ms),
+ render_time_ms(render_time_ms) {
+ this->reference.CopyFrame(*reference);
+ this->render.CopyFrame(*render);
+ }
+
+ FrameComparison(const FrameComparison& compare)
+ : dropped(compare.dropped),
+ send_time_ms(compare.send_time_ms),
+ recv_time_ms(compare.recv_time_ms),
+ render_time_ms(compare.render_time_ms) {
+ this->reference.CopyFrame(compare.reference);
+ this->render.CopyFrame(compare.render);
+ }
+
+ ~FrameComparison() {}
+
+ I420VideoFrame reference;
+ I420VideoFrame render;
+ bool dropped;
+ int64_t send_time_ms;
+ int64_t recv_time_ms;
+ int64_t render_time_ms;
+ };
+
+ // Called with crit_ held (see RenderFrame); consumes the recorded send and
+ // receive times for this frame and enqueues the pair for the worker thread.
+ void AddFrameComparison(const I420VideoFrame* reference,
+ const I420VideoFrame* render,
+ bool dropped,
+ int64_t render_time_ms) {
+ int64_t send_time_ms = send_times_[reference->timestamp()];
+ send_times_.erase(reference->timestamp());
+ int64_t recv_time_ms = recv_times_[reference->timestamp()];
+ recv_times_.erase(reference->timestamp());
+
+ CriticalSectionScoped crit(comparison_lock_.get());
+ comparisons_.push_back(FrameComparison(reference,
+ render,
+ dropped,
+ send_time_ms,
+ recv_time_ms,
+ render_time_ms));
+ }
+
+ // ThreadWrapper trampoline; returning true keeps the thread running.
+ static bool FrameComparisonThread(void* obj) {
+ return static_cast<VideoAnalyzer*>(obj)->CompareFrames();
+ }
+
+ // Drains the comparison queue; when the last expected frame has been
+ // processed it prints results, checks thresholds, signals Wait() and
+ // returns false to end the thread.
+ bool CompareFrames() {
+ // frames_left_ is only touched on this thread after construction.
+ assert(frames_left_ > 0);
+
+ I420VideoFrame reference;
+ I420VideoFrame render;
+ bool dropped;
+ int64_t send_time_ms;
+ int64_t recv_time_ms;
+ int64_t render_time_ms;
+
+ SleepMs(10);
+
+ while (true) {
+ {
+ CriticalSectionScoped crit(comparison_lock_.get());
+ if (comparisons_.empty())
+ return true;
+ // Swap instead of copy to avoid reallocating frame buffers.
+ reference.SwapFrame(&comparisons_.front().reference);
+ render.SwapFrame(&comparisons_.front().render);
+ dropped = comparisons_.front().dropped;
+ send_time_ms = comparisons_.front().send_time_ms;
+ recv_time_ms = comparisons_.front().recv_time_ms;
+ render_time_ms = comparisons_.front().render_time_ms;
+ comparisons_.pop_front();
+ }
+
+ PerformFrameComparison(&reference,
+ &render,
+ dropped,
+ send_time_ms,
+ recv_time_ms,
+ render_time_ms);
+
+ if (--frames_left_ == 0) {
+ PrintResult("psnr", psnr_, " dB");
+ PrintResult("ssim", ssim_, "");
+ PrintResult("sender_time", sender_time_, " ms");
+ printf(
+ "RESULT dropped_frames: %s = %d\n", test_label_, dropped_frames_);
+ PrintResult("receiver_time", receiver_time_, " ms");
+ PrintResult("total_delay_incl_network", end_to_end_, " ms");
+ PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
+ EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
+ EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
+ trigger_->Set();
+
+ return false;
+ }
+ }
+ }
+
+ // Accumulates PSNR/SSIM for every pair; delay stats are only meaningful for
+ // frames that were actually rendered, so dropped frames return early.
+ void PerformFrameComparison(const I420VideoFrame* reference,
+ const I420VideoFrame* render,
+ bool dropped,
+ int64_t send_time_ms,
+ int64_t recv_time_ms,
+ int64_t render_time_ms) {
+ psnr_.AddSample(I420PSNR(reference, render));
+ ssim_.AddSample(I420SSIM(reference, render));
+ if (dropped) {
+ ++dropped_frames_;
+ return;
+ }
+ if (last_render_time_ != 0)
+ rendered_delta_.AddSample(render_time_ms - last_render_time_);
+ last_render_time_ = render_time_ms;
+
+ int64_t input_time_ms = reference->render_time_ms();
+ sender_time_.AddSample(send_time_ms - input_time_ms);
+ receiver_time_.AddSample(render_time_ms - recv_time_ms);
+ end_to_end_.AddSample(render_time_ms - input_time_ms);
+ }
+
+ // Emits one "RESULT" line in the perf-bot format: {mean, stddev}unit.
+ void PrintResult(const char* result_type,
+ test::Statistics stats,
+ const char* unit) {
+ printf("RESULT %s: %s = {%f, %f}%s\n",
+ result_type,
+ test_label_,
+ stats.Mean(),
+ stats.StandardDeviation(),
+ unit);
+ }
+
+ const char* test_label_;
+ test::Statistics sender_time_;
+ test::Statistics receiver_time_;
+ test::Statistics psnr_;
+ test::Statistics ssim_;
+ test::Statistics end_to_end_;
+ test::Statistics rendered_delta_;
+
+ int dropped_frames_;
+ std::deque<I420VideoFrame*> frames_; // Pending reference frames.
+ std::deque<I420VideoFrame*> frame_pool_; // Recycled frame buffers.
+ I420VideoFrame last_rendered_frame_;
+ std::map<uint32_t, int64_t> send_times_; // Keyed by capture timestamp.
+ std::map<uint32_t, int64_t> recv_times_; // Keyed by capture timestamp.
+ uint32_t rtp_timestamp_delta_;
+ I420VideoFrame* first_send_frame_;
+ int64_t last_render_time_;
+ double avg_psnr_threshold_;
+ double avg_ssim_threshold_;
+ int frames_left_; // Comparison-thread-only after construction.
+ scoped_ptr<CriticalSectionWrapper> crit_; // Guards frame/time maps above.
+ scoped_ptr<CriticalSectionWrapper> comparison_lock_; // Guards comparisons_.
+ scoped_ptr<ThreadWrapper> comparison_thread_;
+ std::deque<FrameComparison> comparisons_;
+ scoped_ptr<EventWrapper> trigger_; // Signalled when all frames compared.
+};
+
+// Streams a YUV clip through a full send/receive call with the VideoAnalyzer
+// interposed on both the media path and the transport, then waits for the
+// analyzer to score FLAGS_seconds worth of frames.
+TEST_P(FullStackTest, NoPacketLoss) {
+ FullStackTestParams params = GetParam();
+
+ test::DirectTransport transport;
+ // input_ is NULL for now; it is repointed at the send stream below.
+ VideoAnalyzer analyzer(NULL,
+ &transport,
+ params.test_label,
+ params.avg_psnr_threshold,
+ params.avg_ssim_threshold,
+ FLAGS_seconds * params.clip.fps);
+
+ Call::Config call_config(&analyzer);
+
+ scoped_ptr<Call> call(Call::Create(call_config));
+ // Route packets: transport -> analyzer -> call receiver.
+ analyzer.SetReceiver(call->Receiver());
+ transport.SetReceiver(&analyzer);
+
+ VideoSendStream::Config send_config = call->GetDefaultSendConfig();
+ test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_);
+
+ // TODO(pbos): static_cast shouldn't be required after mflodman refactors the
+ // VideoCodec struct.
+ send_config.codec.width = static_cast<uint16_t>(params.clip.width);
+ send_config.codec.height = static_cast<uint16_t>(params.clip.height);
+ // Pin min/start/max to the same value to keep the bitrate constant.
+ send_config.codec.minBitrate = params.bitrate;
+ send_config.codec.startBitrate = params.bitrate;
+ send_config.codec.maxBitrate = params.bitrate;
+
+ VideoSendStream* send_stream = call->CreateSendStream(send_config);
+ analyzer.input_ = send_stream->Input();
+
+ scoped_ptr<test::FrameGeneratorCapturer> file_capturer(
+ test::FrameGeneratorCapturer::CreateFromYuvFile(
+ &analyzer,
+ test::ResourcePath(params.clip.name, "yuv").c_str(),
+ params.clip.width,
+ params.clip.height,
+ params.clip.fps,
+ Clock::GetRealTimeClock()));
+ ASSERT_TRUE(file_capturer.get() != NULL)
+ << "Could not create capturer for " << params.clip.name
+ << ".yuv. Is this resource file present?";
+
+ VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig();
+ receive_config.rtp.ssrc = send_config.rtp.ssrcs[0];
+ receive_config.renderer = &analyzer;
+
+ VideoReceiveStream* receive_stream =
+ call->CreateReceiveStream(receive_config);
+
+ receive_stream->StartReceive();
+ send_stream->StartSend();
+
+ file_capturer->Start();
+
+ // Blocks until the analyzer has compared the requested number of frames.
+ analyzer.Wait();
+
+ file_capturer->Stop();
+ send_stream->StopSend();
+ receive_stream->StopReceive();
+
+ call->DestroyReceiveStream(receive_stream);
+ call->DestroySendStream(send_stream);
+
+ transport.StopSending();
+}
+
+INSTANTIATE_TEST_CASE_P(FullStack,
+ FullStackTest,
+ ::testing::Values(paris_qcif, foreman_cif));
+
+} // namespace webrtc
diff --git a/video/rampup_tests.cc b/video/rampup_tests.cc
new file mode 100644
index 00000000..c668458a
--- /dev/null
+++ b/video/rampup_tests.cc
@@ -0,0 +1,192 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <assert.h>
+
+#include <map>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/fake_decoder.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/generate_ssrcs.h"
+
+namespace webrtc {
+
+namespace {
+ // RTP header-extension id used for the transmission time offset ("toffset").
+ static const int kTOffsetExtensionId = 7;
+}
+
+// Sits on the receive side of the ramp-up test: it feeds every sent RTP
+// packet into a RemoteBitrateEstimator, echoes the estimate back to the
+// sender as REMB, and signals an event once all expected SSRCs have been
+// seen and the estimate reaches kExpectedBitrateBps.
+class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
+ public:
+ typedef std::map<uint32_t, int> BytesSentMap;
+ StreamObserver(int num_expected_ssrcs,
+ newapi::Transport* feedback_transport,
+ Clock* clock)
+ : critical_section_(CriticalSectionWrapper::CreateCriticalSection()),
+ all_ssrcs_sent_(EventWrapper::Create()),
+ rtp_parser_(RtpHeaderParser::Create()),
+ feedback_transport_(new TransportWrapper(feedback_transport)),
+ receive_stats_(ReceiveStatistics::Create(clock)),
+ clock_(clock),
+ num_expected_ssrcs_(num_expected_ssrcs) {
+ // Ideally we would only have to instantiate an RtcpSender, an
+ // RtpHeaderParser and a RemoteBitrateEstimator here, but due to the current
+ // state of the RTP module we need a full module and receive statistics to
+ // be able to produce an RTCP with REMB.
+ RtpRtcp::Configuration config;
+ config.receive_statistics = receive_stats_.get();
+ config.outgoing_transport = feedback_transport_.get();
+ rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
+ rtp_rtcp_->SetREMBStatus(true);
+ rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
+ rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
+ kTOffsetExtensionId);
+ AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory;
+ remote_bitrate_estimator_.reset(rbe_factory.Create(this, clock));
+ }
+
+ // RemoteBitrateObserver: fires the completion event when the ramp-up target
+ // is reached, and always relays the estimate to the sender via REMB.
+ // NOTE(review): other overrides here are marked OVERRIDE; this one is not.
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ CriticalSectionScoped lock(critical_section_.get());
+ if (ssrcs.size() == num_expected_ssrcs_ && bitrate >= kExpectedBitrateBps)
+ all_ssrcs_sent_->Set();
+ rtp_rtcp_->SetREMBData(
+ bitrate, static_cast<uint8_t>(ssrcs.size()), &ssrcs[0]);
+ rtp_rtcp_->Process();
+ }
+
+ // newapi::Transport: consumes sender packets instead of forwarding them,
+ // updating receive statistics and the bandwidth estimator.
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ CriticalSectionScoped lock(critical_section_.get());
+ RTPHeader header;
+ EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
+ receive_stats_->IncomingPacket(header, length, false);
+ rtp_rtcp_->SetRemoteSSRC(header.ssrc);
+ // NOTE(review): payload size is computed as length - 12 (fixed RTP header
+ // only); CSRCs/extensions are not subtracted — header.headerLength would
+ // be exact. Confirm this approximation is intended.
+ remote_bitrate_estimator_->IncomingPacket(
+ clock_->TimeInMilliseconds(), static_cast<int>(length - 12), header);
+ if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) {
+ remote_bitrate_estimator_->Process();
+ }
+ return true;
+ }
+
+ // Sender RTCP is dropped; only REMB feedback (sent separately) matters.
+ virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
+ return true;
+ }
+
+ // Blocks (up to 120 s) until the expected bitrate has been reached.
+ EventTypeWrapper Wait() { return all_ssrcs_sent_->Wait(120 * 1000); }
+
+ private:
+ // Adapts the old webrtc::Transport interface (used by the RTP module for
+ // RTCP feedback) onto a newapi::Transport.
+ class TransportWrapper : public webrtc::Transport {
+ public:
+ explicit TransportWrapper(newapi::Transport* new_transport)
+ : new_transport_(new_transport) {}
+
+ virtual int SendPacket(int channel, const void* data, int len) OVERRIDE {
+ return new_transport_->SendRTP(static_cast<const uint8_t*>(data), len)
+ ? len
+ : -1;
+ }
+
+ virtual int SendRTCPPacket(int channel,
+ const void* data,
+ int len) OVERRIDE {
+ return new_transport_->SendRTCP(static_cast<const uint8_t*>(data), len)
+ ? len
+ : -1;
+ }
+
+ private:
+ newapi::Transport* new_transport_;
+ };
+
+ // Ramp-up target in bits per second.
+ static const unsigned int kExpectedBitrateBps = 1200000;
+
+ scoped_ptr<CriticalSectionWrapper> critical_section_;
+ scoped_ptr<EventWrapper> all_ssrcs_sent_;
+ scoped_ptr<RtpHeaderParser> rtp_parser_;
+ scoped_ptr<RtpRtcp> rtp_rtcp_;
+ scoped_ptr<TransportWrapper> feedback_transport_;
+ scoped_ptr<ReceiveStatistics> receive_stats_;
+ scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ Clock* clock_;
+ const size_t num_expected_ssrcs_;
+};
+
+// Parameterized on a bool: whether pacing is enabled for the send stream
+// (see send_config.pacing = GetParam() in the test body).
+class RampUpTest : public ::testing::TestWithParam<bool> {
+ public:
+ virtual void SetUp() { reserved_ssrcs_.clear(); }
+
+ protected:
+ // Tracks SSRCs already handed out so generated SSRCs stay unique per test.
+ std::map<uint32_t, bool> reserved_ssrcs_;
+};
+
+// Sends three simulcast-configured streams through a StreamObserver and
+// waits for the estimated receive bitrate to ramp up to the expected target,
+// with pacing toggled by the test parameter.
+TEST_P(RampUpTest, RampUpWithPadding) {
+ test::DirectTransport receiver_transport;
+ // Expect 3 SSRCs, matching SetCodecSettings(&send_config.codec, 3) below.
+ StreamObserver stream_observer(
+ 3, &receiver_transport, Clock::GetRealTimeClock());
+ Call::Config call_config(&stream_observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+ VideoSendStream::Config send_config = call->GetDefaultSendConfig();
+
+ receiver_transport.SetReceiver(call->Receiver());
+
+ test::FakeEncoder encoder(Clock::GetRealTimeClock());
+ send_config.encoder = &encoder;
+ send_config.internal_source = false;
+ test::FakeEncoder::SetCodecSettings(&send_config.codec, 3);
+ send_config.pacing = GetParam();
+ // Register the toffset extension so the observer's parser can read it.
+ send_config.rtp.extensions.push_back(
+ RtpExtension("toffset", kTOffsetExtensionId));
+
+ test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_);
+
+ VideoSendStream* send_stream = call->CreateSendStream(send_config);
+
+ VideoReceiveStream::Config receive_config;
+ receive_config.rtp.ssrc = send_config.rtp.ssrcs[0];
+ receive_config.rtp.nack.rtp_history_ms = send_config.rtp.nack.rtp_history_ms;
+ VideoReceiveStream* receive_stream =
+ call->CreateReceiveStream(receive_config);
+
+ scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+ test::FrameGeneratorCapturer::Create(send_stream->Input(),
+ send_config.codec.width,
+ send_config.codec.height,
+ 30,
+ Clock::GetRealTimeClock()));
+
+ receive_stream->StartReceive();
+ send_stream->StartSend();
+ frame_generator_capturer->Start();
+
+ // Fails the test if the target bitrate is not reached within the timeout.
+ EXPECT_EQ(kEventSignaled, stream_observer.Wait());
+
+ frame_generator_capturer->Stop();
+ send_stream->StopSend();
+ receive_stream->StopReceive();
+
+ call->DestroyReceiveStream(receive_stream);
+ call->DestroySendStream(send_stream);
+}
+
+INSTANTIATE_TEST_CASE_P(RampUpTest, RampUpTest, ::testing::Bool());
+
+} // namespace webrtc
diff --git a/video/transport_adapter.cc b/video/transport_adapter.cc
new file mode 100644
index 00000000..1d325041
--- /dev/null
+++ b/video/transport_adapter.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/transport_adapter.h"
+
+namespace webrtc {
+namespace internal {
+
+// Non-owning: the adapted transport must outlive this adapter.
+TransportAdapter::TransportAdapter(newapi::Transport* transport)
+ : transport_(transport) {}
+
+// Adapts the old channel-based Transport API to newapi::Transport::SendRTP.
+// Returns the byte count on success, -1 on failure; the channel is ignored.
+int TransportAdapter::SendPacket(int /*channel*/,
+ const void* packet,
+ int length) {
+ bool success = transport_->SendRTP(static_cast<const uint8_t*>(packet),
+ static_cast<size_t>(length));
+ return success ? length : -1;
+}
+
+// RTCP counterpart of SendPacket; same return convention, channel ignored.
+int TransportAdapter::SendRTCPPacket(int /*channel*/,
+ const void* packet,
+ int length) {
+ bool success = transport_->SendRTCP(static_cast<const uint8_t*>(packet),
+ static_cast<size_t>(length));
+ return success ? length : -1;
+}
+
+} // namespace internal
+} // namespace webrtc
diff --git a/video/transport_adapter.h b/video/transport_adapter.h
new file mode 100644
index 00000000..3686f38a
--- /dev/null
+++ b/video/transport_adapter.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_ENGINE_INTERNAL_TRANSPORT_ADAPTER_H_
+#define WEBRTC_VIDEO_ENGINE_INTERNAL_TRANSPORT_ADAPTER_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/transport.h"
+
+namespace webrtc {
+namespace internal {
+
+// Bridges the legacy channel-based webrtc::Transport interface onto the new
+// newapi::Transport; the wrapped pointer is not owned.
+class TransportAdapter : public webrtc::Transport {
+ public:
+ explicit TransportAdapter(newapi::Transport* transport);
+
+ virtual int SendPacket(int /*channel*/, const void* packet, int length)
+ OVERRIDE;
+ virtual int SendRTCPPacket(int /*channel*/, const void* packet, int length)
+ OVERRIDE;
+
+ private:
+ newapi::Transport *transport_;
+};
+} // namespace internal
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_INTERNAL_TRANSPORT_ADAPTER_H_
diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc
new file mode 100644
index 00000000..a84c6d27
--- /dev/null
+++ b/video/video_receive_stream.cc
@@ -0,0 +1,192 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/video_receive_stream.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_external_codec.h"
+#include "webrtc/video_engine/include/vie_image_process.h"
+#include "webrtc/video_engine/include/vie_network.h"
+#include "webrtc/video_engine/include/vie_render.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_receive_stream.h"
+
+namespace webrtc {
+namespace internal {
+
+// Creates a ViE channel and wires up all sub-APIs (RTP/RTCP, network, codecs,
+// external decoders, rendering, pre-render callback) from |config|.
+// NOTE(review): several setup failures call abort(); see TODOs below about
+// factoring this into a fallible Init().
+VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
+ const VideoReceiveStream::Config& config,
+ newapi::Transport* transport)
+ : transport_adapter_(transport), config_(config), channel_(-1) {
+ video_engine_base_ = ViEBase::GetInterface(video_engine);
+ // TODO(mflodman): Use the other CreateChannel method.
+ video_engine_base_->CreateChannel(channel_);
+ assert(channel_ != -1);
+
+ rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
+ assert(rtp_rtcp_ != NULL);
+
+ // TODO(pbos): This is not fine grained enough...
+ rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0);
+ rtp_rtcp_->SetKeyFrameRequestMethod(channel_, kViEKeyFrameRequestPliRtcp);
+ switch (config_.rtp.rtcp_mode) {
+ case newapi::kRtcpCompound:
+ rtp_rtcp_->SetRTCPStatus(channel_, kRtcpCompound_RFC4585);
+ break;
+ case newapi::kRtcpReducedSize:
+ rtp_rtcp_->SetRTCPStatus(channel_, kRtcpNonCompound_RFC5506);
+ break;
+ }
+
+ assert(config_.rtp.ssrc != 0);
+
+ network_ = ViENetwork::GetInterface(video_engine);
+ assert(network_ != NULL);
+
+ network_->RegisterSendTransport(channel_, transport_adapter_);
+
+ codec_ = ViECodec::GetInterface(video_engine);
+
+ for (size_t i = 0; i < config_.codecs.size(); ++i) {
+ if (codec_->SetReceiveCodec(channel_, config_.codecs[i]) != 0) {
+ // TODO(pbos): Abort gracefully, this can be a runtime error.
+ // Factor out to an Init() method.
+ abort();
+ }
+ }
+
+ external_codec_ = ViEExternalCodec::GetInterface(video_engine);
+ for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
+ ExternalVideoDecoder* decoder = &config_.external_decoders[i];
+ if (external_codec_->RegisterExternalReceiveCodec(
+ channel_,
+ decoder->payload_type,
+ decoder->decoder,
+ decoder->renderer,
+ decoder->expected_delay_ms) !=
+ 0) {
+ // TODO(pbos): Abort gracefully? Can this be a runtime error?
+ abort();
+ }
+ }
+
+ render_ = webrtc::ViERender::GetInterface(video_engine);
+ assert(render_ != NULL);
+
+ // Register this object as an I420 ExternalRenderer (see DeliverFrame).
+ render_->AddRenderer(channel_, kVideoI420, this);
+
+ image_process_ = ViEImageProcess::GetInterface(video_engine);
+ image_process_->RegisterPreRenderCallback(channel_,
+ config_.pre_render_callback);
+
+ clock_ = Clock::GetRealTimeClock();
+}
+
+// Unwinds the constructor: deregisters callbacks/decoders/transport, then
+// releases every ViE sub-API reference obtained in the constructor.
+VideoReceiveStream::~VideoReceiveStream() {
+ // NOTE(review): the constructor registered a *pre-render* callback, but
+ // this deregisters the *pre-encode* callback — looks like it should be
+ // DeRegisterPreRenderCallback; confirm against the ViEImageProcess API.
+ image_process_->DeRegisterPreEncodeCallback(channel_);
+
+ render_->RemoveRenderer(channel_);
+
+ for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
+ external_codec_->DeRegisterExternalReceiveCodec(
+ channel_, config_.external_decoders[i].payload_type);
+ }
+
+ network_->DeregisterSendTransport(channel_);
+
+ image_process_->Release();
+ video_engine_base_->Release();
+ external_codec_->Release();
+ codec_->Release();
+ network_->Release();
+ render_->Release();
+ rtp_rtcp_->Release();
+}
+
+// Starts rendering and packet reception; aborts on any failure.
+// NOTE(review): the first check treats any nonzero return as failure without
+// the explicit "!= 0" used elsewhere — same behavior, inconsistent style.
+void VideoReceiveStream::StartReceive() {
+ if (render_->StartRender(channel_)) {
+ abort();
+ }
+ if (video_engine_base_->StartReceive(channel_) != 0) {
+ abort();
+ }
+}
+
+// Stops rendering and packet reception; aborts on any failure (mirrors
+// StartReceive, including the mixed error-check style).
+void VideoReceiveStream::StopReceive() {
+ if (render_->StopRender(channel_)) {
+ abort();
+ }
+ if (video_engine_base_->StopReceive(channel_) != 0) {
+ abort();
+ }
+}
+
+// Not yet implemented; |receive_codec| is left untouched.
+void VideoReceiveStream::GetCurrentReceiveCodec(VideoCodec* receive_codec) {
+ // TODO(pbos): Implement
+}
+
+// Feeds an incoming RTCP packet into the ViE network layer for this channel.
+// Returns true if the packet was accepted.
+bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ return network_->ReceivedRTCPPacket(
+ channel_, packet, static_cast<int>(length)) == 0;
+}
+
+// Feeds an incoming RTP packet into the ViE network layer for this channel.
+// Returns true if the packet was accepted.
+bool VideoReceiveStream::DeliverRtp(const uint8_t* packet, size_t length) {
+ return network_->ReceivedRTPPacket(
+ channel_, packet, static_cast<int>(length)) == 0;
+}
+
+// ExternalRenderer: caches the incoming resolution for DeliverFrame.
+// NOTE(review): width_/height_ are written without synchronization;
+// presumably FrameSizeChange and DeliverFrame run on the same render thread —
+// confirm.
+int VideoReceiveStream::FrameSizeChange(unsigned int width,
+ unsigned int height,
+ unsigned int /*number_of_streams*/) {
+ width_ = width;
+ height_ = height;
+ return 0;
+}
+
+// ExternalRenderer: wraps the raw I420 buffer into an I420VideoFrame and
+// hands it to the configured renderer with the remaining time-to-render.
+int VideoReceiveStream::DeliverFrame(uint8_t* frame,
+ int buffer_size,
+ uint32_t timestamp,
+ int64_t render_time,
+ void* /*handle*/) {
+ if (config_.renderer == NULL) {
+ return 0;
+ }
+
+ I420VideoFrame video_frame;
+ // NOTE(review): CreateEmptyFrame takes (width, height, y-stride, u-stride,
+ // v-stride); passing width_/height_/height_ as the strides looks wrong —
+ // chroma strides are conventionally (width + 1) / 2. Confirm against the
+ // I420VideoFrame API.
+ video_frame.CreateEmptyFrame(width_, height_, width_, height_, height_);
+ ConvertToI420(kI420,
+ frame,
+ 0,
+ 0,
+ width_,
+ height_,
+ buffer_size,
+ webrtc::kRotateNone,
+ &video_frame);
+ video_frame.set_timestamp(timestamp);
+ video_frame.set_render_time_ms(render_time);
+
+ config_.renderer->RenderFrame(video_frame,
+ render_time - clock_->TimeInMilliseconds());
+
+ return 0;
+}
+
+bool VideoReceiveStream::IsTextureSupported() { return false; }
+
+} // internal
+} // webrtc
diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h
new file mode 100644
index 00000000..c2352f48
--- /dev/null
+++ b/video/video_receive_stream.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIDEO_RECEIVE_STREAM_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIDEO_RECEIVE_STREAM_IMPL_H_
+
+#include <vector>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/video/transport_adapter.h"
+#include "webrtc/video_engine/include/vie_render.h"
+#include "webrtc/video_receive_stream.h"
+
+namespace webrtc {
+
+class VideoEngine;
+class ViEBase;
+class ViECodec;
+class ViEExternalCodec;
+class ViEImageProcess;
+class ViENetwork;
+class ViERender;
+class ViERTP_RTCP;
+
+namespace internal {
+
+// Internal implementation of the public webrtc::VideoReceiveStream API,
+// built on top of the legacy ViE interfaces. Also acts as the channel's
+// ExternalRenderer, converting decoded I420 buffers into I420VideoFrames
+// for the renderer supplied in the config.
+class VideoReceiveStream : public webrtc::VideoReceiveStream,
+ public webrtc::ExternalRenderer {
+ public:
+ VideoReceiveStream(webrtc::VideoEngine* video_engine,
+ const VideoReceiveStream::Config& config,
+ newapi::Transport* transport);
+ virtual ~VideoReceiveStream();
+
+ // webrtc::VideoReceiveStream implementation.
+ virtual void StartReceive() OVERRIDE;
+ virtual void StopReceive() OVERRIDE;
+
+ virtual void GetCurrentReceiveCodec(VideoCodec* receive_codec) OVERRIDE;
+
+ // webrtc::ExternalRenderer implementation.
+ virtual int FrameSizeChange(unsigned int width, unsigned int height,
+ unsigned int /*number_of_streams*/) OVERRIDE;
+ virtual int DeliverFrame(uint8_t* frame, int buffer_size, uint32_t timestamp,
+ int64_t render_time, void* /*handle*/) OVERRIDE;
+
+ virtual bool IsTextureSupported() OVERRIDE;
+
+ public:
+ // Packet entry points used by the owning Call; return true on success.
+ virtual bool DeliverRtcp(const uint8_t* packet, size_t length);
+ virtual bool DeliverRtp(const uint8_t* packet, size_t length);
+
+ private:
+ TransportAdapter transport_adapter_;
+ VideoReceiveStream::Config config_;
+ Clock* clock_;
+
+ // ViE sub-API handles; acquired in the constructor, Release()d in the dtor.
+ ViEBase* video_engine_base_;
+ ViECodec* codec_;
+ ViEExternalCodec* external_codec_;
+ ViENetwork* network_;
+ ViERender* render_;
+ ViERTP_RTCP* rtp_rtcp_;
+ ViEImageProcess* image_process_;
+
+ int channel_;
+
+ // TODO(pbos): Remove VideoReceiveStream can operate on I420 frames directly.
+ // Cached incoming resolution, updated via FrameSizeChange.
+ unsigned int height_;
+ unsigned int width_;
+};
+} // internal
+} // webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIDEO_RECEIVE_STREAM_IMPL_H_
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
new file mode 100644
index 00000000..8814c359
--- /dev/null
+++ b/video/video_send_stream.cc
@@ -0,0 +1,295 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/video_send_stream.h"
+
+#include <string.h>
+
+#include <string>
+#include <vector>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_external_codec.h"
+#include "webrtc/video_engine/include/vie_image_process.h"
+#include "webrtc/video_engine/include/vie_network.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_send_stream.h"
+
+namespace webrtc {
+namespace internal {
+
+// Super simple and temporary overuse logic. This will move to the application
+// as soon as the new API allows changing send codec on the fly.
+class ResolutionAdaptor : public webrtc::CpuOveruseObserver {
+ public:
+ ResolutionAdaptor(ViECodec* codec, int channel, size_t width, size_t height)
+ : codec_(codec),
+ channel_(channel),
+ max_width_(width),
+ max_height_(height) {}
+
+ virtual ~ResolutionAdaptor() {}
+
+ virtual void OveruseDetected() OVERRIDE {
+ VideoCodec codec;
+ if (codec_->GetSendCodec(channel_, codec) != 0)
+ return;
+
+ if (codec.width / 2 < min_width || codec.height / 2 < min_height)
+ return;
+
+ codec.width /= 2;
+ codec.height /= 2;
+ codec_->SetSendCodec(channel_, codec);
+ }
+
+ virtual void NormalUsage() OVERRIDE {
+ VideoCodec codec;
+ if (codec_->GetSendCodec(channel_, codec) != 0)
+ return;
+
+ if (codec.width * 2u > max_width_ || codec.height * 2u > max_height_)
+ return;
+
+ codec.width *= 2;
+ codec.height *= 2;
+ codec_->SetSendCodec(channel_, codec);
+ }
+
+ private:
+ // Temporary and arbitrarily chosen minimum resolution.
+ static const size_t min_width = 160;
+ static const size_t min_height = 120;
+
+ ViECodec* codec_;
+ const int channel_;
+
+ const size_t max_width_;
+ const size_t max_height_;
+};
+
+VideoSendStream::VideoSendStream(newapi::Transport* transport,
+ bool overuse_detection,
+ webrtc::VideoEngine* video_engine,
+ const VideoSendStream::Config& config)
+ : transport_adapter_(transport), config_(config), external_codec_(NULL) {
+
+ if (config_.codec.numberOfSimulcastStreams > 0) {
+ assert(config_.rtp.ssrcs.size() == config_.codec.numberOfSimulcastStreams);
+ } else {
+ assert(config_.rtp.ssrcs.size() == 1);
+ }
+
+ video_engine_base_ = ViEBase::GetInterface(video_engine);
+ video_engine_base_->CreateChannel(channel_);
+ assert(channel_ != -1);
+
+ rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
+ assert(rtp_rtcp_ != NULL);
+
+ if (config_.rtp.ssrcs.size() == 1) {
+ rtp_rtcp_->SetLocalSSRC(channel_, config_.rtp.ssrcs[0]);
+ } else {
+ for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
+ rtp_rtcp_->SetLocalSSRC(channel_,
+ config_.rtp.ssrcs[i],
+ kViEStreamTypeNormal,
+ static_cast<unsigned char>(i));
+ }
+ }
+ rtp_rtcp_->SetTransmissionSmoothingStatus(channel_, config_.pacing);
+ if (!config_.rtp.rtx.ssrcs.empty()) {
+ assert(config_.rtp.rtx.ssrcs.size() == config_.rtp.ssrcs.size());
+ for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
+ rtp_rtcp_->SetLocalSSRC(channel_,
+ config_.rtp.rtx.ssrcs[i],
+ kViEStreamTypeRtx,
+ static_cast<unsigned char>(i));
+ }
+
+ if (config_.rtp.rtx.rtx_payload_type != 0) {
+ rtp_rtcp_->SetRtxSendPayloadType(channel_,
+ config_.rtp.rtx.rtx_payload_type);
+ }
+ }
+
+ for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
+ const std::string& extension = config_.rtp.extensions[i].name;
+ int id = config_.rtp.extensions[i].id;
+ if (extension == "toffset") {
+ if (rtp_rtcp_->SetSendTimestampOffsetStatus(channel_, true, id) != 0)
+ abort();
+ } else if (extension == "abs-send-time") {
+ if (rtp_rtcp_->SetSendAbsoluteSendTimeStatus(channel_, true, id) != 0)
+ abort();
+ } else {
+ abort(); // Unsupported extension.
+ }
+ }
+
+ // Enable NACK, FEC or both.
+ if (config_.rtp.fec.red_payload_type != -1) {
+ assert(config_.rtp.fec.ulpfec_payload_type != -1);
+ if (config_.rtp.nack.rtp_history_ms > 0) {
+ rtp_rtcp_->SetHybridNACKFECStatus(
+ channel_,
+ true,
+ static_cast<unsigned char>(config_.rtp.fec.red_payload_type),
+ static_cast<unsigned char>(config_.rtp.fec.ulpfec_payload_type));
+ } else {
+ rtp_rtcp_->SetFECStatus(
+ channel_,
+ true,
+ static_cast<unsigned char>(config_.rtp.fec.red_payload_type),
+ static_cast<unsigned char>(config_.rtp.fec.ulpfec_payload_type));
+ }
+ } else {
+ rtp_rtcp_->SetNACKStatus(channel_, config_.rtp.nack.rtp_history_ms > 0);
+ }
+
+ char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength];
+ assert(config_.rtp.c_name.length() < ViERTP_RTCP::KMaxRTCPCNameLength);
+ strncpy(rtcp_cname, config_.rtp.c_name.c_str(), sizeof(rtcp_cname) - 1);
+ rtcp_cname[sizeof(rtcp_cname) - 1] = '\0';
+
+ rtp_rtcp_->SetRTCPCName(channel_, rtcp_cname);
+
+ capture_ = ViECapture::GetInterface(video_engine);
+ capture_->AllocateExternalCaptureDevice(capture_id_, external_capture_);
+ capture_->ConnectCaptureDevice(capture_id_, channel_);
+
+ network_ = ViENetwork::GetInterface(video_engine);
+ assert(network_ != NULL);
+
+ network_->RegisterSendTransport(channel_, transport_adapter_);
+ // 28 to match packet overhead in ModuleRtpRtcpImpl.
+ network_->SetMTU(channel_,
+ static_cast<unsigned int>(config_.rtp.max_packet_size + 28));
+
+ if (config.encoder) {
+ external_codec_ = ViEExternalCodec::GetInterface(video_engine);
+ if (external_codec_->RegisterExternalSendCodec(
+ channel_, config.codec.plType, config.encoder,
+ config.internal_source) != 0) {
+ abort();
+ }
+ }
+
+ codec_ = ViECodec::GetInterface(video_engine);
+ if (codec_->SetSendCodec(channel_, config_.codec) != 0) {
+ abort();
+ }
+
+ if (overuse_detection) {
+ overuse_observer_.reset(
+ new ResolutionAdaptor(codec_, channel_, config_.codec.width,
+ config_.codec.height));
+ video_engine_base_->RegisterCpuOveruseObserver(channel_,
+ overuse_observer_.get());
+ }
+
+ image_process_ = ViEImageProcess::GetInterface(video_engine);
+ image_process_->RegisterPreEncodeCallback(channel_,
+ config_.pre_encode_callback);
+
+ if (config.auto_mute) {
+ codec_->EnableAutoMuting(channel_);
+ }
+}
+
+VideoSendStream::~VideoSendStream() {
+ image_process_->DeRegisterPreEncodeCallback(channel_);
+
+ network_->DeregisterSendTransport(channel_);
+
+ capture_->DisconnectCaptureDevice(channel_);
+ capture_->ReleaseCaptureDevice(capture_id_);
+
+ if (external_codec_) {
+ external_codec_->DeRegisterExternalSendCodec(channel_,
+ config_.codec.plType);
+ }
+
+ video_engine_base_->DeleteChannel(channel_);
+
+ image_process_->Release();
+ video_engine_base_->Release();
+ capture_->Release();
+ codec_->Release();
+ if (external_codec_)
+ external_codec_->Release();
+ network_->Release();
+ rtp_rtcp_->Release();
+}
+
+void VideoSendStream::PutFrame(const I420VideoFrame& frame,
+ uint32_t time_since_capture_ms) {
+ // TODO(pbos): frame_copy should happen after the VideoProcessingModule has
+ // resized the frame.
+ I420VideoFrame frame_copy;
+ frame_copy.CopyFrame(frame);
+
+ ViEVideoFrameI420 vf;
+
+ // TODO(pbos): This represents a memcpy step and is only required because
+ // external_capture_ only takes ViEVideoFrameI420s.
+ vf.y_plane = frame_copy.buffer(kYPlane);
+ vf.u_plane = frame_copy.buffer(kUPlane);
+ vf.v_plane = frame_copy.buffer(kVPlane);
+ vf.y_pitch = frame.stride(kYPlane);
+ vf.u_pitch = frame.stride(kUPlane);
+ vf.v_pitch = frame.stride(kVPlane);
+ vf.width = frame.width();
+ vf.height = frame.height();
+
+ external_capture_->IncomingFrameI420(vf, frame.render_time_ms());
+
+ if (config_.local_renderer != NULL) {
+ config_.local_renderer->RenderFrame(frame, 0);
+ }
+}
+
+VideoSendStreamInput* VideoSendStream::Input() { return this; }
+
+void VideoSendStream::StartSend() {
+ if (video_engine_base_->StartSend(channel_) != 0)
+ abort();
+ if (video_engine_base_->StartReceive(channel_) != 0)
+ abort();
+}
+
+void VideoSendStream::StopSend() {
+ if (video_engine_base_->StopSend(channel_) != 0)
+ abort();
+ if (video_engine_base_->StopReceive(channel_) != 0)
+ abort();
+}
+
+bool VideoSendStream::SetTargetBitrate(
+ int min_bitrate,
+ int max_bitrate,
+ const std::vector<SimulcastStream>& streams) {
+ return false;
+}
+
+void VideoSendStream::GetSendCodec(VideoCodec* send_codec) {
+ *send_codec = config_.codec;
+}
+
+bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ return network_->ReceivedRTCPPacket(
+ channel_, packet, static_cast<int>(length)) == 0;
+}
+
+} // namespace internal
+} // namespace webrtc
diff --git a/video/video_send_stream.h b/video/video_send_stream.h
new file mode 100644
index 00000000..304d825d
--- /dev/null
+++ b/video/video_send_stream.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIDEO_SEND_STREAM_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIDEO_SEND_STREAM_IMPL_H_
+
+#include <vector>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/video/transport_adapter.h"
+#include "webrtc/video_receive_stream.h"
+#include "webrtc/video_send_stream.h"
+
+namespace webrtc {
+
+class VideoEngine;
+class ViEBase;
+class ViECapture;
+class ViECodec;
+class ViEExternalCapture;
+class ViEExternalCodec;
+class ViEImageProcess;
+class ViENetwork;
+class ViERTP_RTCP;
+
+namespace internal {
+
+class ResolutionAdaptor;
+
+class VideoSendStream : public webrtc::VideoSendStream,
+ public VideoSendStreamInput {
+ public:
+ VideoSendStream(newapi::Transport* transport,
+ bool overuse_detection,
+ webrtc::VideoEngine* video_engine,
+ const VideoSendStream::Config& config);
+
+ virtual ~VideoSendStream();
+
+ virtual void PutFrame(const I420VideoFrame& frame,
+ uint32_t time_since_capture_ms) OVERRIDE;
+
+ virtual VideoSendStreamInput* Input() OVERRIDE;
+
+ virtual void StartSend() OVERRIDE;
+
+ virtual void StopSend() OVERRIDE;
+
+ virtual bool SetTargetBitrate(int min_bitrate, int max_bitrate,
+ const std::vector<SimulcastStream>& streams)
+ OVERRIDE;
+
+ virtual void GetSendCodec(VideoCodec* send_codec) OVERRIDE;
+
+ public:
+ bool DeliverRtcp(const uint8_t* packet, size_t length);
+
+ private:
+ TransportAdapter transport_adapter_;
+ VideoSendStream::Config config_;
+
+ ViEBase* video_engine_base_;
+ ViECapture* capture_;
+ ViECodec* codec_;
+ ViEExternalCapture* external_capture_;
+ ViEExternalCodec* external_codec_;
+ ViENetwork* network_;
+ ViERTP_RTCP* rtp_rtcp_;
+ ViEImageProcess* image_process_;
+
+ int channel_;
+ int capture_id_;
+ scoped_ptr<ResolutionAdaptor> overuse_observer_;
+};
+} // namespace internal
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_VIDEO_SEND_STREAM_IMPL_H_
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
new file mode 100644
index 00000000..edcbb2c9
--- /dev/null
+++ b/video/video_send_stream_tests.cc
@@ -0,0 +1,613 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <algorithm> // max
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/call.h"
+#include "webrtc/common_video/interface/i420_video_frame.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/system_wrappers/interface/thread_wrapper.h"
+#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/null_transport.h"
+#include "webrtc/video/transport_adapter.h"
+#include "webrtc/video_send_stream.h"
+
+namespace webrtc {
+
+class SendTransportObserver : public test::NullTransport {
+ public:
+ explicit SendTransportObserver(unsigned long timeout_ms)
+ : rtp_header_parser_(RtpHeaderParser::Create()),
+ send_test_complete_(EventWrapper::Create()),
+ timeout_ms_(timeout_ms) {}
+
+ EventTypeWrapper Wait() { return send_test_complete_->Wait(timeout_ms_); }
+
+ protected:
+ scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+ scoped_ptr<EventWrapper> send_test_complete_;
+
+ private:
+ unsigned long timeout_ms_;
+};
+
+class VideoSendStreamTest : public ::testing::Test {
+ public:
+ VideoSendStreamTest() : fake_encoder_(Clock::GetRealTimeClock()) {}
+
+ protected:
+ void RunSendTest(Call* call,
+ const VideoSendStream::Config& config,
+ SendTransportObserver* observer) {
+ VideoSendStream* send_stream = call->CreateSendStream(config);
+ scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+ test::FrameGeneratorCapturer::Create(
+ send_stream->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+ send_stream->StartSend();
+ frame_generator_capturer->Start();
+
+ EXPECT_EQ(kEventSignaled, observer->Wait());
+
+ frame_generator_capturer->Stop();
+ send_stream->StopSend();
+ call->DestroySendStream(send_stream);
+ }
+
+ VideoSendStream::Config GetSendTestConfig(Call* call) {
+ VideoSendStream::Config config = call->GetDefaultSendConfig();
+ config.encoder = &fake_encoder_;
+ config.internal_source = false;
+ config.rtp.ssrcs.push_back(kSendSsrc);
+ test::FakeEncoder::SetCodecSettings(&config.codec, 1);
+ return config;
+ }
+
+ void TestNackRetransmission(uint32_t retransmit_ssrc);
+
+ static const uint32_t kSendSsrc;
+ static const uint32_t kSendRtxSsrc;
+
+ test::FakeEncoder fake_encoder_;
+};
+
+const uint32_t VideoSendStreamTest::kSendSsrc = 0xC0FFEE;
+const uint32_t VideoSendStreamTest::kSendRtxSsrc = 0xBADCAFE;
+
+TEST_F(VideoSendStreamTest, SendsSetSsrc) {
+ class SendSsrcObserver : public SendTransportObserver {
+ public:
+ SendSsrcObserver() : SendTransportObserver(30 * 1000) {}
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+
+ if (header.ssrc == kSendSsrc)
+ send_test_complete_->Set();
+
+ return true;
+ }
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.max_packet_size = 128;
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+TEST_F(VideoSendStreamTest, SupportsCName) {
+ static std::string kCName = "PjQatC14dGfbVwGPUOA9IH7RlsFDbWl4AhXEiDsBizo=";
+ class CNameObserver : public SendTransportObserver {
+ public:
+ CNameObserver() : SendTransportObserver(30 * 1000) {}
+
+ virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTCPUtility::RTCPParserV2 parser(packet, length, true);
+ EXPECT_TRUE(parser.IsValid());
+
+ RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
+ while (packet_type != RTCPUtility::kRtcpNotValidCode) {
+ if (packet_type == RTCPUtility::kRtcpSdesChunkCode) {
+ EXPECT_EQ(parser.Packet().CName.CName, kCName);
+ send_test_complete_->Set();
+ }
+
+ packet_type = parser.Iterate();
+ }
+
+ return true;
+ }
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.c_name = kCName;
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
+ static const uint8_t kAbsSendTimeExtensionId = 13;
+ class AbsoluteSendTimeObserver : public SendTransportObserver {
+ public:
+ AbsoluteSendTimeObserver() : SendTransportObserver(30 * 1000) {
+ EXPECT_TRUE(rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, kAbsSendTimeExtensionId));
+ }
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+
+ if (header.extension.absoluteSendTime > 0)
+ send_test_complete_->Set();
+
+ return true;
+ }
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.extensions.push_back(
+ RtpExtension("abs-send-time", kAbsSendTimeExtensionId));
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
+ static const uint8_t kTOffsetExtensionId = 13;
+ class DelayedEncoder : public test::FakeEncoder {
+ public:
+ explicit DelayedEncoder(Clock* clock) : test::FakeEncoder(clock) {}
+ virtual int32_t Encode(
+ const I420VideoFrame& input_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<VideoFrameType>* frame_types) OVERRIDE {
+ // A delay needs to be introduced to assure that we get a timestamp
+ // offset.
+ SleepMs(5);
+ return FakeEncoder::Encode(input_image, codec_specific_info, frame_types);
+ }
+ } encoder(Clock::GetRealTimeClock());
+
+ class TransmissionTimeOffsetObserver : public SendTransportObserver {
+ public:
+ TransmissionTimeOffsetObserver() : SendTransportObserver(30 * 1000) {
+ EXPECT_TRUE(rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset, kTOffsetExtensionId));
+ }
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+
+ EXPECT_GT(header.extension.transmissionTimeOffset, 0);
+ send_test_complete_->Set();
+
+ return true;
+ }
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.encoder = &encoder;
+ send_config.rtp.extensions.push_back(
+ RtpExtension("toffset", kTOffsetExtensionId));
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+class FakeReceiveStatistics : public NullReceiveStatistics {
+ public:
+ FakeReceiveStatistics(uint32_t send_ssrc,
+ uint32_t last_sequence_number,
+ uint32_t cumulative_lost,
+ uint8_t fraction_lost)
+ : lossy_stats_(new LossyStatistician(last_sequence_number,
+ cumulative_lost,
+ fraction_lost)) {
+ stats_map_[send_ssrc] = lossy_stats_.get();
+ }
+
+ virtual StatisticianMap GetActiveStatisticians() const OVERRIDE {
+ return stats_map_;
+ }
+
+ virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE {
+ return lossy_stats_.get();
+ }
+
+ private:
+ class LossyStatistician : public StreamStatistician {
+ public:
+ LossyStatistician(uint32_t extended_max_sequence_number,
+ uint32_t cumulative_lost,
+ uint8_t fraction_lost) {
+ stats_.fraction_lost = fraction_lost;
+ stats_.cumulative_lost = cumulative_lost;
+ stats_.extended_max_sequence_number = extended_max_sequence_number;
+ }
+ virtual bool GetStatistics(Statistics* statistics, bool reset) OVERRIDE {
+ *statistics = stats_;
+ return true;
+ }
+ virtual void GetDataCounters(uint32_t* bytes_received,
+ uint32_t* packets_received) const OVERRIDE {
+ *bytes_received = 0;
+ *packets_received = 0;
+ }
+ virtual uint32_t BitrateReceived() const OVERRIDE { return 0; }
+ virtual void ResetStatistics() OVERRIDE {}
+ virtual bool IsRetransmitOfOldPacket(const RTPHeader& header,
+ int min_rtt) const OVERRIDE {
+ return false;
+ }
+
+ virtual bool IsPacketInOrder(uint16_t sequence_number) const OVERRIDE {
+ return true;
+ }
+ Statistics stats_;
+ };
+
+ scoped_ptr<LossyStatistician> lossy_stats_;
+ StatisticianMap stats_map_;
+};
+
+TEST_F(VideoSendStreamTest, SupportsFec) {
+ static const int kRedPayloadType = 118;
+ static const int kUlpfecPayloadType = 119;
+ class FecObserver : public SendTransportObserver {
+ public:
+ FecObserver()
+ : SendTransportObserver(30 * 1000),
+ transport_adapter_(&transport_),
+ send_count_(0),
+ received_media_(false),
+ received_fec_(false) {}
+
+ void SetReceiver(PacketReceiver* receiver) {
+ transport_.SetReceiver(receiver);
+ }
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+
+ // Send lossy receive reports to trigger FEC enabling.
+ if (send_count_++ % 2 != 0) {
+ // Receive statistics reporting having lost 50% of the packets.
+ FakeReceiveStatistics lossy_receive_stats(
+ kSendSsrc, header.sequenceNumber, send_count_ / 2, 127);
+ RTCPSender rtcp_sender(
+ 0, false, Clock::GetRealTimeClock(), &lossy_receive_stats);
+ EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+ rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+ rtcp_sender.SetRemoteSSRC(kSendSsrc);
+
+ RTCPSender::FeedbackState feedback_state;
+
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+ }
+
+ EXPECT_EQ(kRedPayloadType, header.payloadType);
+
+ uint8_t encapsulated_payload_type = packet[header.headerLength];
+
+ if (encapsulated_payload_type == kUlpfecPayloadType) {
+ received_fec_ = true;
+ } else {
+ received_media_ = true;
+ }
+
+ if (received_media_ && received_fec_)
+ send_test_complete_->Set();
+
+ return true;
+ }
+
+ private:
+ internal::TransportAdapter transport_adapter_;
+ test::DirectTransport transport_;
+ int send_count_;
+ bool received_media_;
+ bool received_fec_;
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ observer.SetReceiver(call->Receiver());
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.fec.red_payload_type = kRedPayloadType;
+ send_config.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+void VideoSendStreamTest::TestNackRetransmission(uint32_t retransmit_ssrc) {
+ class NackObserver : public SendTransportObserver {
+ public:
+ explicit NackObserver(uint32_t retransmit_ssrc)
+ : SendTransportObserver(30 * 1000),
+ transport_adapter_(&transport_),
+ send_count_(0),
+ retransmit_ssrc_(retransmit_ssrc),
+ nacked_sequence_number_(0) {}
+
+ void SetReceiver(PacketReceiver* receiver) {
+ transport_.SetReceiver(receiver);
+ }
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+
+ // Nack second packet after receiving the third one.
+ if (++send_count_ == 3) {
+ nacked_sequence_number_ = header.sequenceNumber - 1;
+ NullReceiveStatistics null_stats;
+ RTCPSender rtcp_sender(
+ 0, false, Clock::GetRealTimeClock(), &null_stats);
+ EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+ rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+ rtcp_sender.SetRemoteSSRC(kSendSsrc);
+
+ RTCPSender::FeedbackState feedback_state;
+
+ EXPECT_EQ(0,
+ rtcp_sender.SendRTCP(
+ feedback_state, kRtcpNack, 1, &nacked_sequence_number_));
+ }
+
+ uint16_t sequence_number = header.sequenceNumber;
+
+ if (header.ssrc == retransmit_ssrc_ && retransmit_ssrc_ != kSendSsrc) {
+ // Not kSendSsrc, assume correct RTX packet. Extract sequence number.
+ const uint8_t* rtx_header = packet + header.headerLength;
+ sequence_number = (rtx_header[0] << 8) + rtx_header[1];
+ }
+
+ if (sequence_number == nacked_sequence_number_) {
+ EXPECT_EQ(retransmit_ssrc_, header.ssrc);
+ send_test_complete_->Set();
+ }
+
+ return true;
+ }
+
+ private:
+ internal::TransportAdapter transport_adapter_;
+ test::DirectTransport transport_;
+ int send_count_;
+ uint32_t retransmit_ssrc_;
+ uint16_t nacked_sequence_number_;
+ } observer(retransmit_ssrc);
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+ observer.SetReceiver(call->Receiver());
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.nack.rtp_history_ms = 1000;
+ if (retransmit_ssrc != kSendSsrc)
+ send_config.rtp.rtx.ssrcs.push_back(retransmit_ssrc);
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+TEST_F(VideoSendStreamTest, RetransmitsNack) {
+ // Normal NACKs should use the send SSRC.
+ TestNackRetransmission(kSendSsrc);
+}
+
+TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) {
+ // NACKs over RTX should use a separate SSRC.
+ TestNackRetransmission(kSendRtxSsrc);
+}
+
+TEST_F(VideoSendStreamTest, MaxPacketSize) {
+ class PacketSizeObserver : public SendTransportObserver {
+ public:
+ PacketSizeObserver(size_t max_length) : SendTransportObserver(30 * 1000),
+ max_length_(max_length), accumulated_size_(0) {}
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+
+ EXPECT_LE(length, max_length_);
+
+ accumulated_size_ += length;
+
+ // Marker bit set indicates last fragment of a packet
+ if (header.markerBit) {
+ if (accumulated_size_ + length > max_length_) {
+ // The packet was fragmented, total size was larger than max size,
+ // but size of individual fragments were within size limit => pass!
+ send_test_complete_->Set();
+ }
+ accumulated_size_ = 0; // Last fragment, reset packet size
+ }
+
+ return true;
+ }
+
+ private:
+ size_t max_length_;
+ size_t accumulated_size_;
+ };
+
+ static const uint32_t kMaxPacketSize = 128;
+
+ PacketSizeObserver observer(kMaxPacketSize);
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.max_packet_size = kMaxPacketSize;
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+// The test will go through a number of phases.
+// 1. Start sending packets.
+// 2. As soon as the RTP stream has been detected, signal a low REMB value to
+// activate the auto muter.
+// 3. Wait until |kMuteTimeFrames| have been captured without seeing any RTP
+// packets.
+// 4. Signal a high REMB and the wait for the RTP stream to start again.
+// When the stream is detected again, the test ends.
+TEST_F(VideoSendStreamTest, AutoMute) {
+ static const int kMuteTimeFrames = 60; // Mute for 2 seconds @ 30 fps.
+
+ class RembObserver : public SendTransportObserver, public I420FrameCallback {
+ public:
+ RembObserver()
+ : SendTransportObserver(30 * 1000), // Timeout after 30 seconds.
+ transport_adapter_(&transport_),
+ clock_(Clock::GetRealTimeClock()),
+ test_state_(kBeforeMute),
+ rtp_count_(0),
+ last_sequence_number_(0),
+ mute_frame_count_(0),
+ low_remb_bps_(0),
+ high_remb_bps_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+ void SetReceiver(PacketReceiver* receiver) {
+ transport_.SetReceiver(receiver);
+ }
+
+ virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
+ // Receive statistics reporting having lost 0% of the packets.
+ // This is needed for the send-side bitrate controller to work properly.
+ CriticalSectionScoped lock(crit_sect_.get());
+ SendRtcpFeedback(0); // REMB is only sent if value is > 0.
+ return true;
+ }
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ ++rtp_count_;
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+ last_sequence_number_ = header.sequenceNumber;
+
+ if (test_state_ == kBeforeMute) {
+ // The stream has started. Try to mute it.
+ SendRtcpFeedback(low_remb_bps_);
+ test_state_ = kDuringMute;
+ } else if (test_state_ == kDuringMute) {
+ mute_frame_count_ = 0;
+ } else if (test_state_ == kWaitingForPacket) {
+ send_test_complete_->Set();
+ }
+
+ return true;
+ }
+
+ // This method implements the I420FrameCallback.
+ void FrameCallback(I420VideoFrame* video_frame) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (test_state_ == kDuringMute && ++mute_frame_count_ > kMuteTimeFrames) {
+ SendRtcpFeedback(high_remb_bps_);
+ test_state_ = kWaitingForPacket;
+ }
+ }
+
+ void set_low_remb_bps(int value) { low_remb_bps_ = value; }
+
+ void set_high_remb_bps(int value) { high_remb_bps_ = value; }
+
+ private:
+ enum TestState {
+ kBeforeMute,
+ kDuringMute,
+ kWaitingForPacket,
+ kAfterMute
+ };
+
+ virtual void SendRtcpFeedback(int remb_value) {
+ FakeReceiveStatistics receive_stats(
+ kSendSsrc, last_sequence_number_, rtp_count_, 0);
+ RTCPSender rtcp_sender(0, false, clock_, &receive_stats);
+ EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+ rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+ rtcp_sender.SetRemoteSSRC(kSendSsrc);
+ if (remb_value > 0) {
+ rtcp_sender.SetREMBStatus(true);
+ rtcp_sender.SetREMBData(remb_value, 0, NULL);
+ }
+ RTCPSender::FeedbackState feedback_state;
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+ }
+
+ internal::TransportAdapter transport_adapter_;
+ test::DirectTransport transport_;
+ Clock* clock_;
+ TestState test_state_;
+ int rtp_count_;
+ int last_sequence_number_;
+ int mute_frame_count_;
+ int low_remb_bps_;
+ int high_remb_bps_;
+ scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+ observer.SetReceiver(call->Receiver());
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.nack.rtp_history_ms = 1000;
+ send_config.pre_encode_callback = &observer;
+ send_config.auto_mute = true;
+ unsigned int min_bitrate_bps =
+ send_config.codec.simulcastStream[0].minBitrate * 1000;
+ observer.set_low_remb_bps(min_bitrate_bps - 10000);
+ unsigned int threshold_window = std::max(min_bitrate_bps / 10, 10000u);
+ ASSERT_GT(send_config.codec.simulcastStream[0].maxBitrate * 1000,
+ min_bitrate_bps + threshold_window + 5000);
+ observer.set_high_remb_bps(min_bitrate_bps + threshold_window + 5000);
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
+} // namespace webrtc
diff --git a/video/webrtc_video.gypi b/video/webrtc_video.gypi
new file mode 100644
index 00000000..78f4d1ae
--- /dev/null
+++ b/video/webrtc_video.gypi
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'variables': {
+ 'webrtc_video_dependencies': [
+ '<(webrtc_root)/video_engine/video_engine.gyp:*',
+ ],
+ 'webrtc_video_sources': [
+ 'video/transport_adapter.cc',
+ 'video/transport_adapter.h',
+ 'video/video_receive_stream.cc',
+ 'video/video_receive_stream.h',
+ 'video/video_send_stream.cc',
+ 'video/video_send_stream.h',
+ ],
+ },
+}