author     Primiano Tucci <primiano@google.com>    2014-09-30 14:45:55 +0100
committer  Primiano Tucci <primiano@google.com>    2014-09-30 14:45:55 +0100
commit     1320f92c476a1ad9d19dba2a48c72b75566198e9 (patch)
tree       ea7f149ccad687b22c18a72b729646568b2d54fb /media/cast
parent     39b78c562f50ad7d5551ee861121f899239525a2 (diff)
Merge from Chromium at DEPS revision 267aeeb8d85c
This commit was generated by merge_to_master.py. Change-Id: Id3aac9713b301fae64408cdaee0888724eeb7c0e
Diffstat (limited to 'media/cast')
-rw-r--r--  media/cast/BUILD.gn  99
-rw-r--r--  media/cast/cast.gyp  40
-rw-r--r--  media/cast/cast_config.cc  4
-rw-r--r--  media/cast/cast_config.h  14
-rw-r--r--  media/cast/cast_defines.h  9
-rw-r--r--  media/cast/cast_environment.cc  9
-rw-r--r--  media/cast/cast_environment.h  8
-rw-r--r--  media/cast/cast_sender_impl.cc  37
-rw-r--r--  media/cast/cast_sender_impl.h  5
-rw-r--r--  media/cast/cast_testing.gypi  2
-rw-r--r--  media/cast/common/mod_util.h  54
-rw-r--r--  media/cast/logging/encoding_event_subscriber.cc  1
-rw-r--r--  media/cast/logging/proto/BUILD.gn  7
-rw-r--r--  media/cast/logging/raw_event_subscriber_bundle.cc  3
-rw-r--r--  media/cast/logging/receiver_time_offset_estimator_impl.cc  182
-rw-r--r--  media/cast/logging/receiver_time_offset_estimator_impl.h  73
-rw-r--r--  media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc  73
-rw-r--r--  media/cast/logging/stats_event_subscriber.cc  264
-rw-r--r--  media/cast/logging/stats_event_subscriber.h  62
-rw-r--r--  media/cast/logging/stats_event_subscriber_unittest.cc  18
-rw-r--r--  media/cast/net/cast_transport_config.cc  2
-rw-r--r--  media/cast/net/cast_transport_config.h  4
-rw-r--r--  media/cast/net/cast_transport_sender.h  13
-rw-r--r--  media/cast/net/cast_transport_sender_impl.cc  75
-rw-r--r--  media/cast/net/cast_transport_sender_impl.h  16
-rw-r--r--  media/cast/net/cast_transport_sender_impl_unittest.cc  46
-rw-r--r--  media/cast/net/pacing/paced_sender.cc  54
-rw-r--r--  media/cast/net/pacing/paced_sender.h  13
-rw-r--r--  media/cast/net/pacing/paced_sender_unittest.cc  8
-rw-r--r--  media/cast/net/rtcp/rtcp.cc  104
-rw-r--r--  media/cast/net/rtcp/rtcp.h  33
-rw-r--r--  media/cast/net/rtcp/rtcp_builder.cc (renamed from media/cast/net/rtcp/rtcp_sender.cc)  395
-rw-r--r--  media/cast/net/rtcp/rtcp_builder.h (renamed from media/cast/net/rtcp/rtcp_sender.h)  75
-rw-r--r--  media/cast/net/rtcp/rtcp_builder_unittest.cc (renamed from media/cast/net/rtcp/rtcp_sender_unittest.cc)  258
-rw-r--r--  media/cast/net/rtcp/rtcp_defines.cc  3
-rw-r--r--  media/cast/net/rtcp/rtcp_defines.h  15
-rw-r--r--  media/cast/net/rtcp/rtcp_unittest.cc  450
-rw-r--r--  media/cast/net/rtcp/rtcp_utility.cc  2
-rw-r--r--  media/cast/net/rtcp/rtcp_utility_unittest.cc  8
-rw-r--r--  media/cast/net/rtp/cast_message_builder.cc  19
-rw-r--r--  media/cast/net/rtp/cast_message_builder.h  8
-rw-r--r--  media/cast/net/rtp/cast_message_builder_unittest.cc  29
-rw-r--r--  media/cast/net/rtp/frame_buffer.cc  32
-rw-r--r--  media/cast/net/rtp/frame_buffer.h  7
-rw-r--r--  media/cast/net/rtp/frame_id_map.cc  247
-rw-r--r--  media/cast/net/rtp/frame_id_map.h  90
-rw-r--r--  media/cast/net/rtp/framer.cc  212
-rw-r--r--  media/cast/net/rtp/framer.h  30
-rw-r--r--  media/cast/net/rtp/packet_storage.cc  47
-rw-r--r--  media/cast/net/rtp/packet_storage.h  29
-rw-r--r--  media/cast/net/rtp/packet_storage_unittest.cc  70
-rw-r--r--  media/cast/net/rtp/rtp_packetizer_unittest.cc  12
-rw-r--r--  media/cast/net/rtp/rtp_sender.cc  16
-rw-r--r--  media/cast/net/rtp/rtp_sender.h  2
-rw-r--r--  media/cast/net/udp_transport.cc  19
-rw-r--r--  media/cast/receiver/audio_decoder.cc  5
-rw-r--r--  media/cast/receiver/cast_receiver_impl.cc  6
-rw-r--r--  media/cast/receiver/frame_receiver.cc  21
-rw-r--r--  media/cast/receiver/frame_receiver.h  7
-rw-r--r--  media/cast/receiver/video_decoder.cc  11
-rw-r--r--  media/cast/receiver/video_decoder_unittest.cc  12
-rw-r--r--  media/cast/sender/audio_encoder.cc  30
-rw-r--r--  media/cast/sender/audio_encoder.h  6
-rw-r--r--  media/cast/sender/audio_encoder_unittest.cc  3
-rw-r--r--  media/cast/sender/audio_sender.cc  211
-rw-r--r--  media/cast/sender/audio_sender.h  61
-rw-r--r--  media/cast/sender/audio_sender_unittest.cc  1
-rw-r--r--  media/cast/sender/congestion_control.cc  157
-rw-r--r--  media/cast/sender/congestion_control.h  69
-rw-r--r--  media/cast/sender/congestion_control_unittest.cc  2
-rw-r--r--  media/cast/sender/external_video_encoder.cc  38
-rw-r--r--  media/cast/sender/external_video_encoder.h  7
-rw-r--r--  media/cast/sender/external_video_encoder_unittest.cc  6
-rw-r--r--  media/cast/sender/frame_sender.cc  338
-rw-r--r--  media/cast/sender/frame_sender.h  126
-rw-r--r--  media/cast/sender/rtp_timestamp_helper.cc  36
-rw-r--r--  media/cast/sender/rtp_timestamp_helper.h  41
-rw-r--r--  media/cast/sender/video_encoder_impl.cc  6
-rw-r--r--  media/cast/sender/video_encoder_impl_unittest.cc  188
-rw-r--r--  media/cast/sender/video_sender.cc  343
-rw-r--r--  media/cast/sender/video_sender.h  95
-rw-r--r--  media/cast/sender/video_sender_unittest.cc  68
-rw-r--r--  media/cast/sender/vp8_encoder.cc  218
-rw-r--r--  media/cast/sender/vp8_encoder.h  25
-rw-r--r--  media/cast/test/cast_benchmarks.cc  26
-rw-r--r--  media/cast/test/end2end_unittest.cc  136
-rw-r--r--  media/cast/test/fake_media_source.cc  9
-rw-r--r--  media/cast/test/fake_video_encode_accelerator.cc  8
-rw-r--r--  media/cast/test/fake_video_encode_accelerator.h  6
-rw-r--r--  media/cast/test/proto/BUILD.gn  9
-rw-r--r--  media/cast/test/sender.cc  7
-rw-r--r--  media/cast/test/simulator.cc  46
-rw-r--r--  media/cast/test/utility/default_config.cc  4
-rw-r--r--  media/cast/test/utility/in_process_receiver.cc  2
-rw-r--r--  media/cast/test/utility/udp_proxy.cc  11
95 files changed, 3009 insertions(+), 2799 deletions(-)
diff --git a/media/cast/BUILD.gn b/media/cast/BUILD.gn
index e1c9e66330..15d76f85ee 100644
--- a/media/cast/BUILD.gn
+++ b/media/cast/BUILD.gn
@@ -45,16 +45,14 @@ source_set("common") {
"logging/stats_event_subscriber.h",
]
+ public_deps = [
+ "//media/cast/logging/proto",
+ ]
deps = [
"//base",
"//crypto",
- "//media/cast/logging/proto",
"//net",
]
-
- forward_dependent_configs_from = [
- "//media/cast/logging/proto",
- ]
}
source_set("net") {
@@ -72,8 +70,8 @@ source_set("net") {
"net/rtcp/rtcp_defines.h",
"net/rtcp/rtcp.h",
"net/rtcp/rtcp.cc",
- "net/rtcp/rtcp_sender.cc",
- "net/rtcp/rtcp_sender.h",
+ "net/rtcp/rtcp_builder.cc",
+ "net/rtcp/rtcp_builder.h",
"net/rtcp/rtcp_utility.cc",
"net/rtcp/rtcp_utility.h",
"net/rtp/packet_storage.cc",
@@ -88,8 +86,6 @@ source_set("net") {
"net/rtp/cast_message_builder.h",
"net/rtp/frame_buffer.cc",
"net/rtp/frame_buffer.h",
- "net/rtp/frame_id_map.cc",
- "net/rtp/frame_id_map.h",
"net/rtp/framer.cc",
"net/rtp/framer.h",
"net/rtp/receiver_stats.cc",
@@ -108,8 +104,7 @@ source_set("net") {
source_set("sender") {
sources = [
"cast_sender.h",
-# TODO(hclam): libvpx support.
-# "cast_sender_impl.cc",
+ "cast_sender_impl.cc",
"cast_sender_impl.h",
"sender/audio_encoder.h",
"sender/audio_encoder.cc",
@@ -123,17 +118,14 @@ source_set("sender") {
"sender/fake_software_video_encoder.cc",
"sender/frame_sender.cc",
"sender/frame_sender.h",
- "sender/rtp_timestamp_helper.cc",
- "sender/rtp_timestamp_helper.h",
"sender/software_video_encoder.h",
-# TODO(hclam): libvpx support.
-# "sender/video_encoder.h",
-# "sender/video_encoder_impl.h",
-# "sender/video_encoder_impl.cc",
-# "sender/video_sender.h",
-# "sender/video_sender.cc",
-# "sender/vp8_encoder.cc",
-# "sender/vp8_encoder.h",
+ "sender/video_encoder.h",
+ "sender/video_encoder_impl.h",
+ "sender/video_encoder_impl.cc",
+ "sender/video_sender.h",
+ "sender/video_sender.cc",
+ "sender/vp8_encoder.cc",
+ "sender/vp8_encoder.h",
]
deps = [
@@ -141,10 +133,27 @@ source_set("sender") {
":net",
"//media",
"//media:shared_memory_support",
+ "//third_party/libvpx",
"//third_party/opus",
-# TODO(hclam): libvpx support.
-# "//third_party/libvpx",
]
+
+ # use a restricted subset of media and no software codecs on iOS
+ if (is_ios) {
+ deps += [ "//media:media_for_cast_ios" ]
+ deps -= [
+ "//media",
+ "//third_party/libvpx",
+ "//third_party/opus",
+ ]
+ sources -= [
+ "sender/external_video_encoder.h",
+ "sender/external_video_encoder.cc",
+ "sender/video_encoder_impl.h",
+ "sender/video_encoder_impl.cc",
+ "sender/vp8_encoder.cc",
+ "sender/vp8_encoder.h",
+ ]
+ }
}
source_set("receiver") {
@@ -152,14 +161,12 @@ source_set("receiver") {
"cast_receiver.h",
"receiver/audio_decoder.cc",
"receiver/audio_decoder.h",
-# TODO(hclam): Libvpx support.
-# "receiver/cast_receiver_impl.cc",
-# "receiver/cast_receiver_impl.h",
+ "receiver/cast_receiver_impl.cc",
+ "receiver/cast_receiver_impl.h",
"receiver/frame_receiver.cc",
"receiver/frame_receiver.h",
-# TODO(hclam): Need libvpx support.
-# "receiver/video_decoder.cc",
-# "receiver/video_decoder.h",
+ "receiver/video_decoder.cc",
+ "receiver/video_decoder.h",
]
deps = [
@@ -167,14 +174,24 @@ source_set("receiver") {
":net",
"//media",
"//media:shared_memory_support",
+ "//third_party/libvpx",
"//third_party/opus",
-# TODO(hclam): libvpx support.
-# "//third_party/libvpx",
"//ui/gfx",
]
+
+ # use a restricted subset of media and no software codecs on iOS
+ if (is_ios) {
+ deps += [ "//media:media_for_cast_ios" ]
+ deps -= [
+ "//media",
+ "//third_party/libvpx",
+ "//third_party/opus",
+ ]
+ }
}
source_set("test_support") {
+ testonly = true
sources = [
# TODO(hclam): FFmpeg.
# "test/fake_media_source.cc",
@@ -193,9 +210,8 @@ source_set("test_support") {
"test/utility/barcode.h",
"test/utility/default_config.cc",
"test/utility/default_config.h",
-# TODO(hclam): libvpx support.
-# "test/utility/in_process_receiver.cc",
-# "test/utility/in_process_receiver.h",
+ "test/utility/in_process_receiver.cc",
+ "test/utility/in_process_receiver.h",
"test/utility/input_builder.cc",
"test/utility/input_builder.h",
"test/utility/net_utility.cc",
@@ -235,7 +251,7 @@ test("cast_unittests") {
"net/pacing/mock_paced_packet_sender.cc",
"net/pacing/mock_paced_packet_sender.h",
"net/pacing/paced_sender_unittest.cc",
- "net/rtcp/rtcp_sender_unittest.cc",
+ "net/rtcp/rtcp_builder_unittest.cc",
"net/rtcp/rtcp_unittest.cc",
"net/rtcp/rtcp_utility_unittest.cc",
"net/rtcp/receiver_rtcp_event_subscriber_unittest.cc",
@@ -258,16 +274,14 @@ test("cast_unittests") {
"net/udp_transport_unittest.cc",
"receiver/audio_decoder_unittest.cc",
"receiver/frame_receiver_unittest.cc",
-# TODO(hclam): libvpx support.
-# "receiver/video_decoder_unittest.cc",
+ "receiver/video_decoder_unittest.cc",
"sender/audio_encoder_unittest.cc",
"sender/audio_sender_unittest.cc",
"sender/congestion_control_unittest.cc",
"sender/external_video_encoder_unittest.cc",
-# TODO(hclam): libvpx support.
-# "sender/video_encoder_impl_unittest.cc",
-# "sender/video_sender_unittest.cc",
-# "test/end2end_unittest.cc",
+ "sender/video_encoder_impl_unittest.cc",
+ "sender/video_sender_unittest.cc",
+ "test/end2end_unittest.cc",
"test/fake_receiver_time_offset_estimator.cc",
"test/fake_receiver_time_offset_estimator.h",
"test/fake_video_encode_accelerator.cc",
@@ -289,6 +303,7 @@ test("cast_unittests") {
}
executable("generate_barcode_video") {
+ testonly = true
sources = [
"test/utility/generate_barcode_video.cc",
]
@@ -301,6 +316,7 @@ executable("generate_barcode_video") {
}
executable("generate_timecode_audio") {
+ testonly = true
sources = [
"test/utility/generate_timecode_audio.cc",
]
@@ -313,6 +329,7 @@ executable("generate_timecode_audio") {
}
executable("udp_proxy") {
+ testonly = true
sources = [
"test/utility/udp_proxy_main.cc",
]
diff --git a/media/cast/cast.gyp b/media/cast/cast.gyp
index 2f83f75cf2..f0d4b79a27 100644
--- a/media/cast/cast.gyp
+++ b/media/cast/cast.gyp
@@ -111,8 +111,6 @@
'net/rtp/cast_message_builder.h',
'net/rtp/frame_buffer.cc',
'net/rtp/frame_buffer.h',
- 'net/rtp/frame_id_map.cc',
- 'net/rtp/frame_id_map.h',
'net/rtp/framer.cc',
'net/rtp/framer.h',
'net/rtp/receiver_stats.cc',
@@ -122,6 +120,17 @@
'net/rtp/rtp_receiver_defines.cc',
'net/rtp/rtp_receiver_defines.h',
], # source
+ 'conditions': [
+ # use a restricted subset of media and no software codecs on iOS
+ ['OS=="ios"', {
+ 'dependencies': [ '<(DEPTH)/media/media.gyp:media_for_cast_ios' ],
+ 'dependencies!': [
+ '<(DEPTH)/media/media.gyp:media',
+ '<(DEPTH)/third_party/opus/opus.gyp:opus',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
+ ],
+ }], # OS=="ios"
+ ], # conditions
},
{
# GN version: //media/cast:sender
@@ -152,10 +161,8 @@
'sender/external_video_encoder.cc',
'sender/fake_software_video_encoder.h',
'sender/fake_software_video_encoder.cc',
- 'sender/frame_sender.cc',
+ 'sender/frame_sender.cc',
'sender/frame_sender.h',
- 'sender/rtp_timestamp_helper.cc',
- 'sender/rtp_timestamp_helper.h',
'sender/software_video_encoder.h',
'sender/video_encoder.h',
'sender/video_encoder_impl.h',
@@ -165,6 +172,25 @@
'sender/vp8_encoder.cc',
'sender/vp8_encoder.h',
], # source
+ 'conditions': [
+ # use a restricted subset of media and no software codecs on iOS
+ ['OS=="ios"', {
+ 'dependencies': [ '<(DEPTH)/media/media.gyp:media_for_cast_ios' ],
+ 'dependencies!': [
+ '<(DEPTH)/media/media.gyp:media',
+ '<(DEPTH)/third_party/opus/opus.gyp:opus',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
+ ],
+ 'sources!': [
+ 'sender/external_video_encoder.h',
+ 'sender/external_video_encoder.cc',
+ 'sender/video_encoder_impl.h',
+ 'sender/video_encoder_impl.cc',
+ 'sender/vp8_encoder.cc',
+ 'sender/vp8_encoder.h',
+ ],
+ }], # OS=="ios"
+ ], # conditions
},
{
# GN version: //media/cast:net
@@ -188,12 +214,12 @@
'net/pacing/paced_sender.cc',
'net/pacing/paced_sender.h',
'net/rtcp/receiver_rtcp_event_subscriber.cc',
+ 'net/rtcp/rtcp_builder.cc',
+ 'net/rtcp/rtcp_builder.h',
'net/rtcp/rtcp_defines.cc',
'net/rtcp/rtcp_defines.h',
'net/rtcp/rtcp.h',
'net/rtcp/rtcp.cc',
- 'net/rtcp/rtcp_sender.cc',
- 'net/rtcp/rtcp_sender.h',
'net/rtcp/rtcp_utility.cc',
'net/rtcp/rtcp_utility.h',
'net/rtp/packet_storage.cc',
diff --git a/media/cast/cast_config.cc b/media/cast/cast_config.cc
index b0e082810e..56b20f6063 100644
--- a/media/cast/cast_config.cc
+++ b/media/cast/cast_config.cc
@@ -25,7 +25,7 @@ VideoSenderConfig::VideoSenderConfig()
: ssrc(0),
incoming_feedback_ssrc(0),
rtcp_interval(kDefaultRtcpIntervalMs),
- target_playout_delay(
+ max_playout_delay(
base::TimeDelta::FromMilliseconds(kDefaultRtpMaxDelayMs)),
rtp_payload_type(0),
use_external_encoder(false),
@@ -48,7 +48,7 @@ AudioSenderConfig::AudioSenderConfig()
: ssrc(0),
incoming_feedback_ssrc(0),
rtcp_interval(kDefaultRtcpIntervalMs),
- target_playout_delay(
+ max_playout_delay(
base::TimeDelta::FromMilliseconds(kDefaultRtpMaxDelayMs)),
rtp_payload_type(0),
use_external_encoder(false),
diff --git a/media/cast/cast_config.h b/media/cast/cast_config.h
index 3a27702dd5..62ad9c2353 100644
--- a/media/cast/cast_config.h
+++ b/media/cast/cast_config.h
@@ -38,12 +38,13 @@ struct AudioSenderConfig {
int rtcp_interval;
// The total amount of time between a frame's capture/recording on the sender
- // and its playback on the receiver (i.e., shown to a user). This is fixed as
- // a value large enough to give the system sufficient time to encode,
+ // and its playback on the receiver (i.e., shown to a user). This should be
+ // set to a value large enough to give the system sufficient time to encode,
// transmit/retransmit, receive, decode, and render; given its run-time
// environment (sender/receiver hardware performance, network conditions,
// etc.).
- base::TimeDelta target_playout_delay;
+ base::TimeDelta min_playout_delay;
+ base::TimeDelta max_playout_delay;
// RTP payload type enum: Specifies the type/encoding of frame data.
int rtp_payload_type;
@@ -74,12 +75,13 @@ struct VideoSenderConfig {
int rtcp_interval;
// The total amount of time between a frame's capture/recording on the sender
- // and its playback on the receiver (i.e., shown to a user). This is fixed as
- // a value large enough to give the system sufficient time to encode,
+ // and its playback on the receiver (i.e., shown to a user). This should be
+ // set to a value large enough to give the system sufficient time to encode,
// transmit/retransmit, receive, decode, and render; given its run-time
// environment (sender/receiver hardware performance, network conditions,
// etc.).
- base::TimeDelta target_playout_delay;
+ base::TimeDelta min_playout_delay;
+ base::TimeDelta max_playout_delay;
// RTP payload type enum: Specifies the type/encoding of frame data.
int rtp_payload_type;
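
The hunks above replace the fixed target_playout_delay in both sender configs with a [min_playout_delay, max_playout_delay] range. A minimal sketch of how an embedder might fill the new fields; this is not part of the commit, it assumes a Chromium checkout so that cast_config.h and base/time/time.h resolve, and the 400 ms figure is only an example:

// Illustrative only. Field names come from the cast_config.h hunks above;
// keeping min == max reproduces the old fixed-delay behaviour, while a wider
// range leaves room for the sender to vary the target delay at run time.
#include "base/time/time.h"
#include "media/cast/cast_config.h"

media::cast::VideoSenderConfig MakeExampleVideoConfig() {
  media::cast::VideoSenderConfig config;
  config.min_playout_delay = base::TimeDelta::FromMilliseconds(400);
  config.max_playout_delay = base::TimeDelta::FromMilliseconds(400);
  return config;
}
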
diff --git a/media/cast/cast_defines.h b/media/cast/cast_defines.h
index faafddb885..76e252716e 100644
--- a/media/cast/cast_defines.h
+++ b/media/cast/cast_defines.h
@@ -32,7 +32,6 @@ const uint32 kStartFrameId = UINT32_C(0xffffffff);
// can handle wrap around and compare two frame IDs.
const int kMaxUnackedFrames = 120;
-const int kStartRttMs = 20;
const int64 kCastMessageUpdateIntervalMs = 33;
const int64 kNackRepeatIntervalMs = 30;
@@ -47,8 +46,7 @@ enum CastInitializationStatus {
STATUS_UNSUPPORTED_VIDEO_CODEC,
STATUS_INVALID_AUDIO_CONFIGURATION,
STATUS_INVALID_VIDEO_CONFIGURATION,
- STATUS_GPU_ACCELERATION_NOT_SUPPORTED,
- STATUS_GPU_ACCELERATION_ERROR,
+ STATUS_HW_VIDEO_ENCODER_NOT_SUPPORTED,
};
enum DefaultSettings {
@@ -195,6 +193,11 @@ inline base::TimeDelta RtpDeltaToTimeDelta(int64 rtp_delta, int rtp_timebase) {
return rtp_delta * base::TimeDelta::FromSeconds(1) / rtp_timebase;
}
+inline int64 TimeDeltaToRtpDelta(base::TimeDelta delta, int rtp_timebase) {
+ DCHECK_GT(rtp_timebase, 0);
+ return delta * rtp_timebase / base::TimeDelta::FromSeconds(1);
+}
+
} // namespace cast
} // namespace media
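
As a quick worked example for the new TimeDeltaToRtpDelta() helper (illustrative numbers, assuming the usual 90 kHz video RTP timebase): a 10 ms delta maps to 0.010 s * 90000 = 900 RTP ticks, and the existing RtpDeltaToTimeDelta(900, 90000) converts those 900 ticks back to 10 ms, so the two helpers are inverses up to integer truncation.
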
diff --git a/media/cast/cast_environment.cc b/media/cast/cast_environment.cc
index 93eb8c7252..f22f0a4eca 100644
--- a/media/cast/cast_environment.cc
+++ b/media/cast/cast_environment.cc
@@ -34,7 +34,8 @@ CastEnvironment::CastEnvironment(
CastEnvironment::~CastEnvironment() {
// Logging must be deleted on the main thread.
- if (main_thread_proxy_ && !main_thread_proxy_->RunsTasksOnCurrentThread()) {
+ if (main_thread_proxy_.get() &&
+ !main_thread_proxy_->RunsTasksOnCurrentThread()) {
main_thread_proxy_->PostTask(
FROM_HERE,
base::Bind(&DeleteLoggingOnMainThread, base::Passed(&logging_)));
@@ -73,13 +74,13 @@ scoped_refptr<SingleThreadTaskRunner> CastEnvironment::GetTaskRunner(
bool CastEnvironment::CurrentlyOn(ThreadId identifier) {
switch (identifier) {
case CastEnvironment::MAIN:
- return main_thread_proxy_ &&
+ return main_thread_proxy_.get() &&
main_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::AUDIO:
- return audio_thread_proxy_ &&
+ return audio_thread_proxy_.get() &&
audio_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::VIDEO:
- return video_thread_proxy_ &&
+ return video_thread_proxy_.get() &&
video_thread_proxy_->RunsTasksOnCurrentThread();
default:
NOTREACHED() << "Invalid thread identifier";
diff --git a/media/cast/cast_environment.h b/media/cast/cast_environment.h
index 1549747ee2..d3c9474fa3 100644
--- a/media/cast/cast_environment.h
+++ b/media/cast/cast_environment.h
@@ -64,13 +64,9 @@ class CastEnvironment : public base::RefCountedThreadSafe<CastEnvironment> {
scoped_refptr<base::SingleThreadTaskRunner> GetTaskRunner(
ThreadId identifier) const;
- bool HasAudioThread() {
- return audio_thread_proxy_ ? true : false;
- }
+ bool HasAudioThread() { return audio_thread_proxy_.get() ? true : false; }
- bool HasVideoThread() {
- return video_thread_proxy_ ? true : false;
- }
+ bool HasVideoThread() { return video_thread_proxy_.get() ? true : false; }
protected:
virtual ~CastEnvironment();
diff --git a/media/cast/cast_sender_impl.cc b/media/cast/cast_sender_impl.cc
index 19a054f513..7b77517730 100644
--- a/media/cast/cast_sender_impl.cc
+++ b/media/cast/cast_sender_impl.cc
@@ -77,7 +77,7 @@ class LocalAudioFrameInput : public AudioFrameInput {
scoped_ptr<CastSender> CastSender::Create(
scoped_refptr<CastEnvironment> cast_environment,
CastTransportSender* const transport_sender) {
- CHECK(cast_environment);
+ CHECK(cast_environment.get());
return scoped_ptr<CastSender>(
new CastSenderImpl(cast_environment, transport_sender));
}
@@ -88,7 +88,7 @@ CastSenderImpl::CastSenderImpl(
: cast_environment_(cast_environment),
transport_sender_(transport_sender),
weak_factory_(this) {
- CHECK(cast_environment);
+ CHECK(cast_environment.get());
}
void CastSenderImpl::InitializeAudio(
@@ -105,7 +105,6 @@ void CastSenderImpl::InitializeAudio(
const CastInitializationStatus status = audio_sender_->InitializationResult();
if (status == STATUS_AUDIO_INITIALIZED) {
- ssrc_of_audio_sender_ = audio_config.incoming_feedback_ssrc;
audio_frame_input_ =
new LocalAudioFrameInput(cast_environment_, audio_sender_->AsWeakPtr());
}
@@ -127,19 +126,16 @@ void CastSenderImpl::InitializeVideo(
VLOG(1) << "CastSenderImpl@" << this << "::InitializeVideo()";
- video_sender_.reset(new VideoSender(cast_environment_,
- video_config,
- create_vea_cb,
- create_video_encode_mem_cb,
- transport_sender_));
-
- const CastInitializationStatus status = video_sender_->InitializationResult();
- if (status == STATUS_VIDEO_INITIALIZED) {
- ssrc_of_video_sender_ = video_config.incoming_feedback_ssrc;
- video_frame_input_ =
- new LocalVideoFrameInput(cast_environment_, video_sender_->AsWeakPtr());
- }
- cast_initialization_cb.Run(status);
+ video_sender_.reset(new VideoSender(
+ cast_environment_,
+ video_config,
+ base::Bind(&CastSenderImpl::OnVideoInitialized,
+ weak_factory_.GetWeakPtr(), cast_initialization_cb),
+ create_vea_cb,
+ create_video_encode_mem_cb,
+ transport_sender_,
+ base::Bind(&CastSenderImpl::SetTargetPlayoutDelay,
+ weak_factory_.GetWeakPtr())));
if (audio_sender_) {
DCHECK(audio_sender_->GetTargetPlayoutDelay() ==
video_sender_->GetTargetPlayoutDelay());
@@ -170,5 +166,14 @@ void CastSenderImpl::SetTargetPlayoutDelay(
}
}
+void CastSenderImpl::OnVideoInitialized(
+ const CastInitializationCallback& initialization_cb,
+ media::cast::CastInitializationStatus result) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ video_frame_input_ =
+ new LocalVideoFrameInput(cast_environment_, video_sender_->AsWeakPtr());
+ initialization_cb.Run(result);
+}
+
} // namespace cast
} // namespace media
diff --git a/media/cast/cast_sender_impl.h b/media/cast/cast_sender_impl.h
index c4680abf16..e3f16fea2f 100644
--- a/media/cast/cast_sender_impl.h
+++ b/media/cast/cast_sender_impl.h
@@ -47,6 +47,9 @@ class CastSenderImpl : public CastSender {
private:
void ReceivedPacket(scoped_ptr<Packet> packet);
+ void OnVideoInitialized(
+ const CastInitializationCallback& initialization_cb,
+ media::cast::CastInitializationStatus result);
CastInitializationCallback initialization_callback_;
scoped_ptr<AudioSender> audio_sender_;
@@ -57,8 +60,6 @@ class CastSenderImpl : public CastSender {
// The transport sender is owned by the owner of the CastSender, and should be
// valid throughout the lifetime of the CastSender.
CastTransportSender* const transport_sender_;
- uint32 ssrc_of_audio_sender_;
- uint32 ssrc_of_video_sender_;
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<CastSenderImpl> weak_factory_;
diff --git a/media/cast/cast_testing.gypi b/media/cast/cast_testing.gypi
index ef12af72f2..a2af257227 100644
--- a/media/cast/cast_testing.gypi
+++ b/media/cast/cast_testing.gypi
@@ -86,7 +86,7 @@
'net/pacing/mock_paced_packet_sender.cc',
'net/pacing/mock_paced_packet_sender.h',
'net/pacing/paced_sender_unittest.cc',
- 'net/rtcp/rtcp_sender_unittest.cc',
+ 'net/rtcp/rtcp_builder_unittest.cc',
'net/rtcp/rtcp_unittest.cc',
'net/rtcp/rtcp_utility_unittest.cc',
'net/rtcp/receiver_rtcp_event_subscriber_unittest.cc',
diff --git a/media/cast/common/mod_util.h b/media/cast/common/mod_util.h
new file mode 100644
index 0000000000..b2f9dc5522
--- /dev/null
+++ b/media/cast/common/mod_util.h
@@ -0,0 +1,54 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAST_COMMON_MOD_UTIL_H_
+#define MEDIA_CAST_COMMON_MOD_UTIL_H_
+
+#include <map>
+#include "base/logging.h"
+
+namespace media {
+namespace cast {
+
+// MAP is a map<uint??, ...> where the unsigned integer is
+// assumed to wrap around, but only a small range is used at a time.
+// Return the oldest entry in the map.
+template<class MAP>
+typename MAP::iterator ModMapOldest(MAP* map) {
+ typename MAP::iterator ret = map->begin();
+ if (ret != map->end()) {
+ typename MAP::key_type lower_quarter = 0;
+ lower_quarter--;
+ lower_quarter >>= 1;
+ if (ret->first < lower_quarter) {
+ typename MAP::iterator tmp = map->upper_bound(lower_quarter * 3);
+ if (tmp != map->end())
+ ret = tmp;
+ }
+ }
+ return ret;
+}
+
+// MAP is a map<uint??, ...> where the unsigned integer is
+// assumed to wrap around, but only a small range is used at a time.
+// Returns the previous entry in the map.
+template<class MAP>
+typename MAP::iterator ModMapPrevious(MAP* map, typename MAP::iterator i) {
+ DCHECK(!map->empty());
+ typename MAP::iterator ret = i;
+ if (i == map->begin()) {
+ ret = map->end();
+ }
+ ret--;
+ if (i == ret)
+ return map->end();
+ if ((i->first - ret->first) > ((typename MAP::key_type(0) - 1)) >> 1)
+ return map->end();
+ return ret;
+}
+
+} // namespace cast
+} // namespace media
+
+#endif
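
ModMapOldest() and ModMapPrevious() above order keys modulo the key width rather than as plain integers, so a map keyed by a wrapping counter (such as an RTP timestamp or packet id) still yields the logically oldest entry after the counter wraps past zero. A minimal usage sketch, not part of the commit, assuming it is compiled inside a Chromium checkout so that base/logging.h (pulled in by mod_util.h) resolves:

// Illustrative only: keys just below 2^32 are logically older than the
// small post-wrap keys, and ModMapOldest() picks them accordingly.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

#include "media/cast/common/mod_util.h"

int main() {
  std::map<uint32_t, std::string> m;
  m[0xFFFFFFF0u] = "oldest (pre-wrap)";
  m[0x00000002u] = "newer (post-wrap)";
  m[0x00000007u] = "newest (post-wrap)";

  // m.begin() would claim 0x00000002 is oldest; ModMapOldest() sees that the
  // smallest keys sit just after a wrap and returns 0xFFFFFFF0 instead.
  std::map<uint32_t, std::string>::iterator oldest =
      media::cast::ModMapOldest(&m);
  std::cout << std::hex << oldest->first << ": " << oldest->second << "\n";
  return 0;
}

ModMapPrevious() walks one entry backwards under the same modular ordering, returning end() when the only candidate is more than half the key space away.
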
diff --git a/media/cast/logging/encoding_event_subscriber.cc b/media/cast/logging/encoding_event_subscriber.cc
index 48cc911ba8..27225249e1 100644
--- a/media/cast/logging/encoding_event_subscriber.cc
+++ b/media/cast/logging/encoding_event_subscriber.cc
@@ -4,6 +4,7 @@
#include "media/cast/logging/encoding_event_subscriber.h"
+#include <algorithm>
#include <cstring>
#include <utility>
diff --git a/media/cast/logging/proto/BUILD.gn b/media/cast/logging/proto/BUILD.gn
index 2576c94129..8bf881a2b7 100644
--- a/media/cast/logging/proto/BUILD.gn
+++ b/media/cast/logging/proto/BUILD.gn
@@ -9,16 +9,13 @@ source_set("proto") {
sources = [
"proto_utils.cc",
]
- deps = [
- ":cast_logging_proto",
- ]
- forward_dependent_configs_from = [
+ public_deps = [
":cast_logging_proto",
]
}
proto_library("cast_logging_proto") {
- visibility = ":proto"
+ visibility = [ ":proto" ]
sources = [
"raw_events.proto",
]
diff --git a/media/cast/logging/raw_event_subscriber_bundle.cc b/media/cast/logging/raw_event_subscriber_bundle.cc
index 1946b6ce82..5dc5f793f5 100644
--- a/media/cast/logging/raw_event_subscriber_bundle.cc
+++ b/media/cast/logging/raw_event_subscriber_bundle.cc
@@ -53,8 +53,7 @@ RawEventSubscriberBundle::~RawEventSubscriberBundle() {
void RawEventSubscriberBundle::AddEventSubscribers(bool is_audio) {
if (!receiver_offset_estimator_.get()) {
- receiver_offset_estimator_.reset(
- new ReceiverTimeOffsetEstimatorImpl);
+ receiver_offset_estimator_.reset(new ReceiverTimeOffsetEstimatorImpl);
cast_environment_->Logging()->AddRawEventSubscriber(
receiver_offset_estimator_.get());
}
diff --git a/media/cast/logging/receiver_time_offset_estimator_impl.cc b/media/cast/logging/receiver_time_offset_estimator_impl.cc
index 44d5eb0b3d..d5116542d6 100644
--- a/media/cast/logging/receiver_time_offset_estimator_impl.cc
+++ b/media/cast/logging/receiver_time_offset_estimator_impl.cc
@@ -6,124 +6,140 @@
#include <utility>
#include "base/logging.h"
+#include "base/time/tick_clock.h"
#include "media/cast/logging/receiver_time_offset_estimator_impl.h"
namespace media {
namespace cast {
-// This should be large enough so that we can collect all 3 events before
-// the entry gets removed from the map.
-const size_t kMaxEventTimesMapSize = 100;
+ReceiverTimeOffsetEstimatorImpl::BoundCalculator::BoundCalculator()
+ : has_bound_(false) {}
+ReceiverTimeOffsetEstimatorImpl::BoundCalculator::~BoundCalculator() {}
+
+void ReceiverTimeOffsetEstimatorImpl::BoundCalculator::SetSent(
+ uint32 rtp,
+ uint32 packet_id,
+ bool audio,
+ base::TimeTicks t) {
+ uint64 key = (static_cast<uint64>(rtp) << 32) | (packet_id << 1) |
+ static_cast<uint64>(audio);
+ events_[key].first = t;
+ CheckUpdate(key);
+}
+
+void ReceiverTimeOffsetEstimatorImpl::BoundCalculator::SetReceived(
+ uint32 rtp,
+ uint16 packet_id,
+ bool audio,
+ base::TimeTicks t) {
+ uint64 key = (static_cast<uint64>(rtp) << 32) | (packet_id << 1) |
+ static_cast<uint64>(audio);
+ events_[key].second = t;
+ CheckUpdate(key);
+}
+
+void ReceiverTimeOffsetEstimatorImpl::BoundCalculator::UpdateBound(
+ base::TimeTicks sent, base::TimeTicks received) {
+ base::TimeDelta delta = received - sent;
+ if (has_bound_) {
+ if (delta < bound_) {
+ bound_ = delta;
+ } else {
+ bound_ += (delta - bound_) / kClockDriftSpeed;
+ }
+ } else {
+ bound_ = delta;
+ }
+ has_bound_ = true;
+ }
+
+void ReceiverTimeOffsetEstimatorImpl::BoundCalculator::CheckUpdate(
+ uint64 key) {
+ const TimeTickPair& ticks = events_[key];
+ if (!ticks.first.is_null() && !ticks.second.is_null()) {
+ UpdateBound(ticks.first, ticks.second);
+ events_.erase(key);
+ return;
+ }
-ReceiverTimeOffsetEstimatorImpl::ReceiverTimeOffsetEstimatorImpl()
- : bounded_(false) {}
+ if (events_.size() > kMaxEventTimesMapSize) {
+ EventMap::iterator i = ModMapOldest(&events_);
+ if (i != events_.end()) {
+ events_.erase(i);
+ }
+ }
+}
+
+ReceiverTimeOffsetEstimatorImpl::ReceiverTimeOffsetEstimatorImpl() {
+}
ReceiverTimeOffsetEstimatorImpl::~ReceiverTimeOffsetEstimatorImpl() {
DCHECK(thread_checker_.CalledOnValidThread());
}
+
void ReceiverTimeOffsetEstimatorImpl::OnReceiveFrameEvent(
const FrameEvent& frame_event) {
DCHECK(thread_checker_.CalledOnValidThread());
-
- if (frame_event.media_type != VIDEO_EVENT)
- return;
-
- CastLoggingEvent event = frame_event.type;
- if (event != FRAME_ENCODED && event != FRAME_ACK_SENT &&
- event != FRAME_ACK_RECEIVED)
- return;
-
- EventTimesMap::iterator it = event_times_map_.find(frame_event.rtp_timestamp);
- if (it == event_times_map_.end()) {
- EventTimes event_times;
- it = event_times_map_.insert(std::make_pair(frame_event.rtp_timestamp,
- event_times)).first;
- }
- switch (event) {
- case FRAME_ENCODED:
- // Encode is supposed to happen only once. If we see duplicate event,
- // throw away the entry.
- if (it->second.event_a_time.is_null()) {
- it->second.event_a_time = frame_event.timestamp;
- } else {
- event_times_map_.erase(it);
- return;
- }
- break;
+ switch (frame_event.type) {
case FRAME_ACK_SENT:
- if (it->second.event_b_time.is_null()) {
- it->second.event_b_time = frame_event.timestamp;
- } else if (it->second.event_b_time != frame_event.timestamp) {
- // Duplicate ack sent events are normal due to RTCP redundancy,
- // but they must have the same event timestamp.
- event_times_map_.erase(it);
- return;
- }
+ lower_bound_.SetSent(frame_event.rtp_timestamp,
+ 0,
+ frame_event.media_type == AUDIO_EVENT,
+ frame_event.timestamp);
break;
case FRAME_ACK_RECEIVED:
- // If there are duplicate ack received events, pick the one with the
- // smallest event timestamp so we can get a better bound.
- if (it->second.event_c_time.is_null()) {
- it->second.event_c_time = frame_event.timestamp;
- } else {
- it->second.event_c_time =
- std::min(frame_event.timestamp, it->second.event_c_time);
- }
+ lower_bound_.SetReceived(frame_event.rtp_timestamp,
+ 0,
+ frame_event.media_type == AUDIO_EVENT,
+ frame_event.timestamp);
break;
default:
- NOTREACHED();
- }
-
- if (!it->second.event_a_time.is_null() &&
- !it->second.event_b_time.is_null() &&
- !it->second.event_c_time.is_null()) {
- UpdateOffsetBounds(it->second);
- event_times_map_.erase(it);
+ // Ignored
+ break;
}
-
- // Keep the map size at most |kMaxEventTimesMapSize|.
- if (event_times_map_.size() > kMaxEventTimesMapSize)
- event_times_map_.erase(event_times_map_.begin());
}
bool ReceiverTimeOffsetEstimatorImpl::GetReceiverOffsetBounds(
base::TimeDelta* lower_bound,
base::TimeDelta* upper_bound) {
- if (!bounded_)
+ if (!lower_bound_.has_bound() || !upper_bound_.has_bound())
return false;
- *lower_bound = offset_lower_bound_;
- *upper_bound = offset_upper_bound_;
+ *lower_bound = -lower_bound_.bound();
+ *upper_bound = upper_bound_.bound();
+
+ // Sanitize the output, we don't want the upper bound to be
+ // lower than the lower bound, make them the same.
+ if (upper_bound < lower_bound) {
+ lower_bound += (lower_bound - upper_bound) / 2;
+ upper_bound = lower_bound;
+ }
return true;
}
void ReceiverTimeOffsetEstimatorImpl::OnReceivePacketEvent(
const PacketEvent& packet_event) {
- // Not interested in packet events.
DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-void ReceiverTimeOffsetEstimatorImpl::UpdateOffsetBounds(
- const EventTimes& event) {
- base::TimeDelta lower_bound = event.event_b_time - event.event_c_time;
- base::TimeDelta upper_bound = event.event_b_time - event.event_a_time;
-
- if (bounded_) {
- lower_bound = std::max(lower_bound, offset_lower_bound_);
- upper_bound = std::min(upper_bound, offset_upper_bound_);
- }
-
- if (lower_bound > upper_bound) {
- VLOG(2) << "Got bogus offset bound values [" << lower_bound.InMilliseconds()
- << ", " << upper_bound.InMilliseconds() << "].";
- return;
+ switch (packet_event.type) {
+ case PACKET_SENT_TO_NETWORK:
+ upper_bound_.SetSent(packet_event.rtp_timestamp,
+ packet_event.packet_id,
+ packet_event.media_type == AUDIO_EVENT,
+ packet_event.timestamp);
+ break;
+ case PACKET_RECEIVED:
+ upper_bound_.SetReceived(packet_event.rtp_timestamp,
+ packet_event.packet_id,
+ packet_event.media_type == AUDIO_EVENT,
+ packet_event.timestamp);
+ break;
+ default:
+ // Ignored
+ break;
}
-
- offset_lower_bound_ = lower_bound;
- offset_upper_bound_ = upper_bound;
- bounded_ = true;
}
+
} // namespace cast
} // namespace media
diff --git a/media/cast/logging/receiver_time_offset_estimator_impl.h b/media/cast/logging/receiver_time_offset_estimator_impl.h
index 1d0f6c8357..768ccbdb0f 100644
--- a/media/cast/logging/receiver_time_offset_estimator_impl.h
+++ b/media/cast/logging/receiver_time_offset_estimator_impl.h
@@ -5,23 +5,33 @@
#ifndef MEDIA_CAST_LOGGING_RECEIVER_TIME_OFFSET_ESTIMATOR_IMPL_H_
#define MEDIA_CAST_LOGGING_RECEIVER_TIME_OFFSET_ESTIMATOR_IMPL_H_
+#include <map>
+
#include "base/time/time.h"
#include "base/threading/thread_checker.h"
+#include "media/cast/common/mod_util.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/receiver_time_offset_estimator.h"
namespace media {
namespace cast {
-// This implementation listens to three types of video events:
-// 1. FRAME_ENCODED (sender side)
-// 2. FRAME_ACK_SENT (receiver side)
-// 3. FRAME_ACK_RECEIVED (sender side)
+
+// This should be large enough so that we can collect all 3 events before
+// the entry gets removed from the map.
+const size_t kMaxEventTimesMapSize = 500;
+
+// The lower, this is, the faster we adjust to clock drift.
+// (But with more jitter.)
+const size_t kClockDriftSpeed = 500;
+
+
+// This implementation listens to two pair of events
+// 1. FRAME_ACK_SENT / FRAME_ACK_RECEIVED (receiver->sender)
+// 2. PACKET_SENT_TO_NETWORK / PACKET_RECEIVED (sender->receiver)
// There is a causal relationship between these events in that these events
// must happen in order. This class obtains the lower and upper bounds for
-// the offset by taking the difference of timestamps (2) - (1) and (2) - (3),
-// respectively.
-// The bound will become better as the latency between the events decreases.
+// the offset by taking the difference of timestamps.
class ReceiverTimeOffsetEstimatorImpl : public ReceiverTimeOffsetEstimator {
public:
ReceiverTimeOffsetEstimatorImpl();
@@ -37,22 +47,47 @@ class ReceiverTimeOffsetEstimatorImpl : public ReceiverTimeOffsetEstimator {
base::TimeDelta* upper_bound) OVERRIDE;
private:
- struct EventTimes {
- base::TimeTicks event_a_time;
- base::TimeTicks event_b_time;
- base::TimeTicks event_c_time;
- };
+ // This helper uses the difference between sent and recived event
+ // to calculate an upper bound on the difference between the clocks
+ // on the sender and receiver. Note that this difference can take
+ // very large positive or negative values, but the smaller value is
+ // always the better estimate, since a receive event cannot possibly
+ // happen before a send event. Note that we use this to calculate
+ // both upper and lower bounds by reversing the sender/receiver
+ // relationship.
+ class BoundCalculator {
+ public:
+ typedef std::pair<base::TimeTicks, base::TimeTicks> TimeTickPair;
+ typedef std::map<uint64, TimeTickPair> EventMap;
- typedef std::map<RtpTimestamp, EventTimes> EventTimesMap;
+ BoundCalculator();
+ ~BoundCalculator();
+ bool has_bound() const { return has_bound_; }
+ base::TimeDelta bound() const { return bound_; }
- void UpdateOffsetBounds(const EventTimes& event);
+ void SetSent(uint32 rtp,
+ uint32 packet_id,
+ bool audio,
+ base::TimeTicks t);
- // Fixed size storage to store event times for recent frames.
- EventTimesMap event_times_map_;
+ void SetReceived(uint32 rtp,
+ uint16 packet_id,
+ bool audio,
+ base::TimeTicks t);
+
+ private:
+ void UpdateBound(base::TimeTicks a, base::TimeTicks b);
+ void CheckUpdate(uint64 key);
- bool bounded_;
- base::TimeDelta offset_lower_bound_;
- base::TimeDelta offset_upper_bound_;
+ private:
+ EventMap events_;
+ bool has_bound_;
+ base::TimeDelta bound_;
+ };
+
+ // Fixed size storage to store event times for recent frames.
+ BoundCalculator upper_bound_;
+ BoundCalculator lower_bound_;
base::ThreadChecker thread_checker_;
DISALLOW_COPY_AND_ASSIGN(ReceiverTimeOffsetEstimatorImpl);
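
A worked example of the bound arithmetic implemented by BoundCalculator above (illustrative numbers): suppose the receiver clock runs 100 ms ahead of the sender. A packet taking 10 ms on the wire gives PACKET_RECEIVED - PACKET_SENT_TO_NETWORK = 10 + 100 = 110 ms, so the smallest such delta tracked by upper_bound_ is an upper bound on the offset. A frame ACK taking 30 ms in the opposite direction gives FRAME_ACK_RECEIVED - FRAME_ACK_SENT = 30 - 100 = -70 ms, which GetReceiverOffsetBounds() negates into a 70 ms lower bound. The true 100 ms offset lies inside [70, 110]; lower-latency samples tighten the bounds, while UpdateBound()'s kClockDriftSpeed term lets a bound relax slowly so the estimate can follow clock drift.
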
diff --git a/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc b/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc
index 1cdbecf5de..c1059aec18 100644
--- a/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc
+++ b/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc
@@ -72,6 +72,13 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
true,
5678);
+ cast_environment_->Logging()->InsertPacketEvent(
+ sender_clock_->NowTicks(),
+ PACKET_SENT_TO_NETWORK, VIDEO_EVENT,
+ rtp_timestamp,
+ frame_id,
+ 56, 78, 1500);
+
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
@@ -79,6 +86,13 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
receiver_clock_.NowTicks(), FRAME_ACK_SENT, VIDEO_EVENT,
rtp_timestamp, frame_id);
+ cast_environment_->Logging()->InsertPacketEvent(
+ receiver_clock_.NowTicks(),
+ PACKET_RECEIVED, VIDEO_EVENT,
+ rtp_timestamp,
+ frame_id,
+ 56, 78, 1500);
+
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
AdvanceClocks(base::TimeDelta::FromMilliseconds(30));
@@ -121,6 +135,13 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
true,
5678);
+ cast_environment_->Logging()->InsertPacketEvent(
+ sender_clock_->NowTicks(),
+ PACKET_SENT_TO_NETWORK, VIDEO_EVENT,
+ rtp_timestamp,
+ frame_id,
+ 56, 78, 1500);
+
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
@@ -133,6 +154,13 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
+ cast_environment_->Logging()->InsertPacketEvent(
+ event_b_time,
+ PACKET_RECEIVED, VIDEO_EVENT,
+ rtp_timestamp,
+ frame_id,
+ 56, 78, 1500);
+
cast_environment_->Logging()->InsertFrameEvent(
event_b_time, FRAME_ACK_SENT, VIDEO_EVENT, rtp_timestamp, frame_id);
@@ -175,6 +203,13 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
true,
5678);
+ cast_environment_->Logging()->InsertPacketEvent(
+ sender_clock_->NowTicks(),
+ PACKET_SENT_TO_NETWORK, VIDEO_EVENT,
+ rtp_timestamp_a,
+ frame_id_a,
+ 56, 78, 1500);
+
AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
cast_environment_->Logging()->InsertEncodedFrameEvent(
sender_clock_->NowTicks(),
@@ -184,11 +219,27 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
1234,
true,
5678);
+
+ cast_environment_->Logging()->InsertPacketEvent(
+ sender_clock_->NowTicks(),
+ PACKET_SENT_TO_NETWORK, VIDEO_EVENT,
+ rtp_timestamp_b,
+ frame_id_b,
+ 56, 78, 1500);
+
cast_environment_->Logging()->InsertFrameEvent(
receiver_clock_.NowTicks(), FRAME_ACK_SENT, VIDEO_EVENT,
rtp_timestamp_a, frame_id_a);
AdvanceClocks(base::TimeDelta::FromMilliseconds(20));
+
+ cast_environment_->Logging()->InsertPacketEvent(
+ receiver_clock_.NowTicks(),
+ PACKET_RECEIVED, VIDEO_EVENT,
+ rtp_timestamp_b,
+ frame_id_b,
+ 56, 78, 1500);
+
cast_environment_->Logging()->InsertFrameEvent(
receiver_clock_.NowTicks(), FRAME_ACK_SENT, VIDEO_EVENT,
rtp_timestamp_b, frame_id_b);
@@ -217,7 +268,21 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
true,
5678);
+ cast_environment_->Logging()->InsertPacketEvent(
+ sender_clock_->NowTicks(),
+ PACKET_SENT_TO_NETWORK, VIDEO_EVENT,
+ rtp_timestamp_c,
+ frame_id_c,
+ 56, 78, 1500);
+
AdvanceClocks(base::TimeDelta::FromMilliseconds(3));
+ cast_environment_->Logging()->InsertPacketEvent(
+ receiver_clock_.NowTicks(),
+ PACKET_RECEIVED, VIDEO_EVENT,
+ rtp_timestamp_c,
+ frame_id_c,
+ 56, 78, 1500);
+
cast_environment_->Logging()->InsertFrameEvent(
receiver_clock_.NowTicks(), FRAME_ACK_SENT, VIDEO_EVENT,
rtp_timestamp_c, frame_id_c);
@@ -232,10 +297,10 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
EXPECT_TRUE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
int64 lower_bound_ms = lower_bound.InMilliseconds();
int64 upper_bound_ms = upper_bound.InMilliseconds();
- EXPECT_EQ(95, lower_bound_ms);
- EXPECT_EQ(103, upper_bound_ms);
- EXPECT_GE(true_offset_ms, lower_bound_ms);
- EXPECT_LE(true_offset_ms, upper_bound_ms);
+ EXPECT_GT(lower_bound_ms, 90);
+ EXPECT_LE(lower_bound_ms, true_offset_ms);
+ EXPECT_LT(upper_bound_ms, 150);
+ EXPECT_GT(upper_bound_ms, true_offset_ms);
}
} // namespace cast
diff --git a/media/cast/logging/stats_event_subscriber.cc b/media/cast/logging/stats_event_subscriber.cc
index 03c669cfcb..b22812e446 100644
--- a/media/cast/logging/stats_event_subscriber.cc
+++ b/media/cast/logging/stats_event_subscriber.cc
@@ -2,9 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <cmath>
+
#include "media/cast/logging/stats_event_subscriber.h"
+#include "base/format_macros.h"
#include "base/logging.h"
+#include "base/strings/stringprintf.h"
#include "base/values.h"
#define STAT_ENUM_TO_STRING(enum) \
@@ -30,6 +34,66 @@ bool IsReceiverEvent(CastLoggingEvent event) {
} // namespace
+StatsEventSubscriber::SimpleHistogram::SimpleHistogram(int64 min,
+ int64 max,
+ int64 width)
+ : min_(min), max_(max), width_(width), buckets_((max - min) / width + 2) {
+ CHECK_GT(buckets_.size(), 2u);
+ CHECK_EQ(0, (max_ - min_) % width_);
+}
+
+StatsEventSubscriber::SimpleHistogram::~SimpleHistogram() {
+}
+
+void StatsEventSubscriber::SimpleHistogram::Add(int64 sample) {
+ if (sample < min_) {
+ ++buckets_.front();
+ } else if (sample >= max_) {
+ ++buckets_.back();
+ } else {
+ size_t index = 1 + (sample - min_) / width_;
+ DCHECK_LT(index, buckets_.size());
+ ++buckets_[index];
+ }
+}
+
+void StatsEventSubscriber::SimpleHistogram::Reset() {
+ buckets_.assign(buckets_.size(), 0);
+}
+
+scoped_ptr<base::ListValue>
+StatsEventSubscriber::SimpleHistogram::GetHistogram() const {
+ scoped_ptr<base::ListValue> histo(new base::ListValue);
+
+ scoped_ptr<base::DictionaryValue> bucket(new base::DictionaryValue);
+
+ if (buckets_.front()) {
+ bucket->SetInteger(base::StringPrintf("<%" PRId64, min_),
+ buckets_.front());
+ histo->Append(bucket.release());
+ }
+
+ for (size_t i = 1; i < buckets_.size() - 1; i++) {
+ if (!buckets_[i])
+ continue;
+ bucket.reset(new base::DictionaryValue);
+ int64 lower = min_ + (i - 1) * width_;
+ int64 upper = lower + width_ - 1;
+ bucket->SetInteger(
+ base::StringPrintf("%" PRId64 "-%" PRId64, lower, upper),
+ buckets_[i]);
+ histo->Append(bucket.release());
+ }
+
+ if (buckets_.back()) {
+ bucket.reset(new base::DictionaryValue);
+ bucket->SetInteger(base::StringPrintf(">=%" PRId64, max_),
+ buckets_.back());
+ histo->Append(bucket.release());
+ }
+ return histo.Pass();
+}
+
StatsEventSubscriber::StatsEventSubscriber(
EventMediaType event_media_type,
base::TickClock* clock,
@@ -40,11 +104,11 @@ StatsEventSubscriber::StatsEventSubscriber(
network_latency_datapoints_(0),
e2e_latency_datapoints_(0),
num_frames_dropped_by_encoder_(0),
- num_frames_late_(0) {
+ num_frames_late_(0),
+ start_time_(clock_->NowTicks()) {
DCHECK(event_media_type == AUDIO_EVENT || event_media_type == VIDEO_EVENT);
- base::TimeTicks now = clock_->NowTicks();
- start_time_ = now;
- last_response_received_time_ = base::TimeTicks();
+
+ InitHistograms();
}
StatsEventSubscriber::~StatsEventSubscriber() {
@@ -71,17 +135,26 @@ void StatsEventSubscriber::OnReceiveFrameEvent(const FrameEvent& frame_event) {
it->second.sum_delay += frame_event.delay_delta;
}
+ bool is_receiver_event = IsReceiverEvent(type);
+ UpdateFirstLastEventTime(frame_event.timestamp, is_receiver_event);
+
if (type == FRAME_CAPTURE_BEGIN) {
RecordFrameCaptureTime(frame_event);
+ } else if (type == FRAME_CAPTURE_END) {
+ RecordCaptureLatency(frame_event);
} else if (type == FRAME_ENCODED) {
- MarkAsEncoded(frame_event.rtp_timestamp);
+ RecordEncodeLatency(frame_event);
+ } else if (type == FRAME_ACK_SENT) {
+ RecordFrameTxLatency(frame_event);
} else if (type == FRAME_PLAYOUT) {
RecordE2ELatency(frame_event);
- if (frame_event.delay_delta <= base::TimeDelta())
+ base::TimeDelta delay_delta = frame_event.delay_delta;
+ histograms_[PLAYOUT_DELAY_MS_HISTO]->Add(delay_delta.InMillisecondsF());
+ if (delay_delta <= base::TimeDelta())
num_frames_late_++;
}
- if (IsReceiverEvent(type))
+ if (is_receiver_event)
UpdateLastResponseTime(frame_event.timestamp);
}
@@ -104,6 +177,9 @@ void StatsEventSubscriber::OnReceivePacketEvent(
it->second.sum_size += packet_event.size;
}
+ bool is_receiver_event = IsReceiverEvent(type);
+ UpdateFirstLastEventTime(packet_event.timestamp, is_receiver_event);
+
if (type == PACKET_SENT_TO_NETWORK ||
type == PACKET_RECEIVED) {
RecordNetworkLatency(packet_event);
@@ -113,10 +189,31 @@ void StatsEventSubscriber::OnReceivePacketEvent(
ErasePacketSentTime(packet_event);
}
- if (IsReceiverEvent(type))
+ if (is_receiver_event)
UpdateLastResponseTime(packet_event.timestamp);
}
+void StatsEventSubscriber::UpdateFirstLastEventTime(base::TimeTicks timestamp,
+ bool is_receiver_event) {
+ if (is_receiver_event) {
+ base::TimeDelta receiver_offset;
+ if (!GetReceiverOffset(&receiver_offset))
+ return;
+ timestamp -= receiver_offset;
+ }
+
+ if (first_event_time_.is_null()) {
+ first_event_time_ = timestamp;
+ } else {
+ first_event_time_ = std::min(first_event_time_, timestamp);
+ }
+ if (last_event_time_.is_null()) {
+ last_event_time_ = timestamp;
+ } else {
+ last_event_time_ = std::max(last_event_time_, timestamp);
+ }
+}
+
scoped_ptr<base::DictionaryValue> StatsEventSubscriber::GetStats() const {
StatsMap stats_map;
GetStatsInternal(&stats_map);
@@ -125,7 +222,15 @@ scoped_ptr<base::DictionaryValue> StatsEventSubscriber::GetStats() const {
scoped_ptr<base::DictionaryValue> stats(new base::DictionaryValue);
for (StatsMap::const_iterator it = stats_map.begin(); it != stats_map.end();
++it) {
- stats->SetDouble(CastStatToString(it->first), it->second);
+ // Round to 3 digits after the decimal point.
+ stats->SetDouble(CastStatToString(it->first),
+ round(it->second * 1000.0) / 1000.0);
+ }
+ for (HistogramMap::const_iterator it = histograms_.begin();
+ it != histograms_.end();
+ ++it) {
+ stats->Set(CastStatToString(it->first),
+ it->second->GetHistogram().release());
}
ret->Set(event_media_type_ == AUDIO_EVENT ? "audio" : "video",
@@ -145,10 +250,17 @@ void StatsEventSubscriber::Reset() {
e2e_latency_datapoints_ = 0;
num_frames_dropped_by_encoder_ = 0;
num_frames_late_ = 0;
- recent_captured_frames_.clear();
+ recent_frame_infos_.clear();
packet_sent_times_.clear();
start_time_ = clock_->NowTicks();
last_response_received_time_ = base::TimeTicks();
+ for (HistogramMap::iterator it = histograms_.begin(); it != histograms_.end();
+ ++it) {
+ it->second->Reset();
+ }
+
+ first_event_time_ = base::TimeTicks();
+ last_event_time_ = base::TimeTicks();
}
// static
@@ -172,11 +284,34 @@ const char* StatsEventSubscriber::CastStatToString(CastStat stat) {
STAT_ENUM_TO_STRING(NUM_PACKETS_SENT);
STAT_ENUM_TO_STRING(NUM_PACKETS_RETRANSMITTED);
STAT_ENUM_TO_STRING(NUM_PACKETS_RTX_REJECTED);
+ STAT_ENUM_TO_STRING(FIRST_EVENT_TIME_MS);
+ STAT_ENUM_TO_STRING(LAST_EVENT_TIME_MS);
+ STAT_ENUM_TO_STRING(CAPTURE_LATENCY_MS_HISTO);
+ STAT_ENUM_TO_STRING(ENCODE_LATENCY_MS_HISTO);
+ STAT_ENUM_TO_STRING(PACKET_LATENCY_MS_HISTO);
+ STAT_ENUM_TO_STRING(FRAME_LATENCY_MS_HISTO);
+ STAT_ENUM_TO_STRING(PLAYOUT_DELAY_MS_HISTO);
}
NOTREACHED();
return "";
}
+const int kMaxLatencyBucketMs = 800;
+const int kBucketWidthMs = 20;
+
+void StatsEventSubscriber::InitHistograms() {
+ histograms_[CAPTURE_LATENCY_MS_HISTO].reset(
+ new SimpleHistogram(0, kMaxLatencyBucketMs, kBucketWidthMs));
+ histograms_[ENCODE_LATENCY_MS_HISTO].reset(
+ new SimpleHistogram(0, kMaxLatencyBucketMs, kBucketWidthMs));
+ histograms_[PACKET_LATENCY_MS_HISTO].reset(
+ new SimpleHistogram(0, kMaxLatencyBucketMs, kBucketWidthMs));
+ histograms_[FRAME_LATENCY_MS_HISTO].reset(
+ new SimpleHistogram(0, kMaxLatencyBucketMs, kBucketWidthMs));
+ histograms_[PLAYOUT_DELAY_MS_HISTO].reset(
+ new SimpleHistogram(0, kMaxLatencyBucketMs, kBucketWidthMs));
+}
+
void StatsEventSubscriber::GetStatsInternal(StatsMap* stats_map) const {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -231,6 +366,16 @@ void StatsEventSubscriber::GetStatsInternal(StatsMap* stats_map) const {
stats_map->insert(std::make_pair(NUM_FRAMES_DROPPED_BY_ENCODER,
num_frames_dropped_by_encoder_));
stats_map->insert(std::make_pair(NUM_FRAMES_LATE, num_frames_late_));
+ if (!first_event_time_.is_null()) {
+ stats_map->insert(std::make_pair(
+ FIRST_EVENT_TIME_MS,
+ (first_event_time_ - base::TimeTicks::UnixEpoch()).InMillisecondsF()));
+ }
+ if (!last_event_time_.is_null()) {
+ stats_map->insert(std::make_pair(
+ LAST_EVENT_TIME_MS,
+ (last_event_time_ - base::TimeTicks::UnixEpoch()).InMillisecondsF()));
+ }
}
bool StatsEventSubscriber::GetReceiverOffset(base::TimeDelta* offset) {
@@ -245,22 +390,83 @@ bool StatsEventSubscriber::GetReceiverOffset(base::TimeDelta* offset) {
return true;
}
+void StatsEventSubscriber::MaybeInsertFrameInfo(RtpTimestamp rtp_timestamp,
+ const FrameInfo& frame_info) {
+ // No need to insert if |rtp_timestamp| is the smaller than every key in the
+ // map as it is just going to get erased anyway.
+ if (recent_frame_infos_.size() == kMaxFrameInfoMapSize &&
+ rtp_timestamp < recent_frame_infos_.begin()->first) {
+ return;
+ }
+
+ recent_frame_infos_.insert(std::make_pair(rtp_timestamp, frame_info));
+
+ if (recent_frame_infos_.size() >= kMaxFrameInfoMapSize) {
+ FrameInfoMap::iterator erase_it = recent_frame_infos_.begin();
+ if (erase_it->second.encode_time.is_null())
+ num_frames_dropped_by_encoder_++;
+ recent_frame_infos_.erase(erase_it);
+ }
+}
+
void StatsEventSubscriber::RecordFrameCaptureTime(
const FrameEvent& frame_event) {
- recent_captured_frames_.insert(std::make_pair(
- frame_event.rtp_timestamp, FrameInfo(frame_event.timestamp)));
- if (recent_captured_frames_.size() > kMaxFrameInfoMapSize) {
- FrameInfoMap::iterator erase_it = recent_captured_frames_.begin();
- if (!erase_it->second.encoded)
- num_frames_dropped_by_encoder_++;
- recent_captured_frames_.erase(erase_it);
+ FrameInfo frame_info;
+ frame_info.capture_time = frame_event.timestamp;
+ MaybeInsertFrameInfo(frame_event.rtp_timestamp, frame_info);
+}
+
+void StatsEventSubscriber::RecordCaptureLatency(const FrameEvent& frame_event) {
+ FrameInfoMap::iterator it =
+ recent_frame_infos_.find(frame_event.rtp_timestamp);
+ if (it == recent_frame_infos_.end())
+ return;
+
+ if (!it->second.capture_time.is_null()) {
+ double capture_latency_ms =
+ (it->second.capture_time - frame_event.timestamp).InMillisecondsF();
+ histograms_[CAPTURE_LATENCY_MS_HISTO]->Add(capture_latency_ms);
}
+
+ it->second.capture_end_time = frame_event.timestamp;
}
-void StatsEventSubscriber::MarkAsEncoded(RtpTimestamp rtp_timestamp) {
- FrameInfoMap::iterator it = recent_captured_frames_.find(rtp_timestamp);
- if (it != recent_captured_frames_.end())
- it->second.encoded = true;
+void StatsEventSubscriber::RecordEncodeLatency(const FrameEvent& frame_event) {
+ FrameInfoMap::iterator it =
+ recent_frame_infos_.find(frame_event.rtp_timestamp);
+ if (it == recent_frame_infos_.end()) {
+ FrameInfo frame_info;
+ frame_info.encode_time = frame_event.timestamp;
+ MaybeInsertFrameInfo(frame_event.rtp_timestamp, frame_info);
+ return;
+ }
+
+ if (!it->second.capture_end_time.is_null()) {
+ double encode_latency_ms =
+ (frame_event.timestamp - it->second.capture_end_time).InMillisecondsF();
+ histograms_[ENCODE_LATENCY_MS_HISTO]->Add(encode_latency_ms);
+ }
+
+ it->second.encode_time = frame_event.timestamp;
+}
+
+void StatsEventSubscriber::RecordFrameTxLatency(const FrameEvent& frame_event) {
+ FrameInfoMap::iterator it =
+ recent_frame_infos_.find(frame_event.rtp_timestamp);
+ if (it == recent_frame_infos_.end())
+ return;
+
+ if (it->second.encode_time.is_null())
+ return;
+
+ base::TimeDelta receiver_offset;
+ if (!GetReceiverOffset(&receiver_offset))
+ return;
+
+ base::TimeTicks sender_time = frame_event.timestamp - receiver_offset;
+ double frame_tx_latency_ms =
+ (sender_time - it->second.encode_time).InMillisecondsF();
+ histograms_[FRAME_LATENCY_MS_HISTO]->Add(frame_tx_latency_ms);
}
void StatsEventSubscriber::RecordE2ELatency(const FrameEvent& frame_event) {
@@ -269,8 +475,8 @@ void StatsEventSubscriber::RecordE2ELatency(const FrameEvent& frame_event) {
return;
FrameInfoMap::iterator it =
- recent_captured_frames_.find(frame_event.rtp_timestamp);
- if (it == recent_captured_frames_.end())
+ recent_frame_infos_.find(frame_event.rtp_timestamp);
+ if (it == recent_frame_infos_.end())
return;
// Playout time is event time + playout delay.
@@ -306,8 +512,6 @@ void StatsEventSubscriber::RecordNetworkLatency(
std::make_pair(packet_event.rtp_timestamp, packet_event.packet_id));
PacketEventTimeMap::iterator it = packet_sent_times_.find(key);
if (it == packet_sent_times_.end()) {
- std::pair<RtpTimestamp, uint16> key(
- std::make_pair(packet_event.rtp_timestamp, packet_event.packet_id));
std::pair<base::TimeTicks, CastLoggingEvent> value =
std::make_pair(packet_event.timestamp, packet_event.type);
packet_sent_times_.insert(std::make_pair(key, value));
@@ -333,9 +537,14 @@ void StatsEventSubscriber::RecordNetworkLatency(
if (match) {
// Subtract by offset.
packet_received_time -= receiver_offset;
+ base::TimeDelta latency_delta = packet_received_time - packet_sent_time;
- total_network_latency_ += packet_received_time - packet_sent_time;
+ total_network_latency_ += latency_delta;
network_latency_datapoints_++;
+
+ histograms_[PACKET_LATENCY_MS_HISTO]->Add(
+ latency_delta.InMillisecondsF());
+
packet_sent_times_.erase(it);
}
}
@@ -447,8 +656,7 @@ StatsEventSubscriber::PacketLogStats::PacketLogStats()
: event_counter(0), sum_size(0) {}
StatsEventSubscriber::PacketLogStats::~PacketLogStats() {}
-StatsEventSubscriber::FrameInfo::FrameInfo(base::TimeTicks capture_time)
- : capture_time(capture_time), encoded(false) {
+StatsEventSubscriber::FrameInfo::FrameInfo() : encoded(false) {
}
StatsEventSubscriber::FrameInfo::~FrameInfo() {
}
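
To make the new histograms concrete, here is an illustrative trace through the recording paths above (all numbers invented, receiver-side timestamps already adjusted by the receiver offset): for a frame whose FRAME_CAPTURE_END fires 5 ms after FRAME_CAPTURE_BEGIN, with FRAME_ENCODED at 20 ms, a packet sent at 21 ms and received at 31 ms, and FRAME_ACK_SENT observed at 60 ms, these hunks add 20 - 5 = 15 ms to ENCODE_LATENCY_MS_HISTO, 31 - 21 = 10 ms to PACKET_LATENCY_MS_HISTO, and 60 - 20 = 40 ms to FRAME_LATENCY_MS_HISTO, while the frame's FRAME_PLAYOUT delay_delta feeds PLAYOUT_DELAY_MS_HISTO and the FRAME_CAPTURE_BEGIN/END pair feeds CAPTURE_LATENCY_MS_HISTO.
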
diff --git a/media/cast/logging/stats_event_subscriber.h b/media/cast/logging/stats_event_subscriber.h
index 06ceaca9ed..7662e60fd1 100644
--- a/media/cast/logging/stats_event_subscriber.h
+++ b/media/cast/logging/stats_event_subscriber.h
@@ -6,6 +6,7 @@
#define MEDIA_CAST_LOGGING_STATS_EVENT_SUBSCRIBER_H_
#include "base/gtest_prod_util.h"
+#include "base/memory/linked_ptr.h"
#include "base/memory/scoped_ptr.h"
#include "base/threading/thread_checker.h"
#include "base/time/tick_clock.h"
@@ -15,6 +16,7 @@
namespace base {
class DictionaryValue;
+class ListValue;
}
namespace media {
@@ -76,6 +78,34 @@ class StatsEventSubscriber : public RawEventSubscriber {
size_t sum_size;
};
+ class SimpleHistogram {
+ public:
+ // This will create N+2 buckets where N = (max - min) / width:
+ // Underflow bucket: < min
+ // Bucket 0: [min, min + width - 1]
+ // Bucket 1: [min + width, min + 2 * width - 1]
+ // ...
+ // Bucket N-1: [max - width, max - 1]
+ // Overflow bucket: >= max
+ // |min| must be less than |max|.
+ // |width| must divide |max - min| evenly.
+ SimpleHistogram(int64 min, int64 max, int64 width);
+
+ ~SimpleHistogram();
+
+ void Add(int64 sample);
+
+ void Reset();
+
+ scoped_ptr<base::ListValue> GetHistogram() const;
+
+ private:
+ int64 min_;
+ int64 max_;
+ int64 width_;
+ std::vector<int> buckets_;
+ };
+
enum CastStat {
// Capture frame rate.
CAPTURE_FPS,
@@ -116,17 +146,31 @@ class StatsEventSubscriber : public RawEventSubscriber {
NUM_PACKETS_RETRANSMITTED,
// Number of packets that had their retransmission cancelled.
NUM_PACKETS_RTX_REJECTED,
+ // Unix time in milliseconds of first event since reset.
+ FIRST_EVENT_TIME_MS,
+ // Unix time in milliseconds of last event since reset.
+ LAST_EVENT_TIME_MS,
+
+ // Histograms
+ CAPTURE_LATENCY_MS_HISTO,
+ ENCODE_LATENCY_MS_HISTO,
+ PACKET_LATENCY_MS_HISTO,
+ FRAME_LATENCY_MS_HISTO,
+ PLAYOUT_DELAY_MS_HISTO
};
struct FrameInfo {
- explicit FrameInfo(base::TimeTicks capture_time);
+ FrameInfo();
~FrameInfo();
base::TimeTicks capture_time;
+ base::TimeTicks capture_end_time;
+ base::TimeTicks encode_time;
bool encoded;
};
typedef std::map<CastStat, double> StatsMap;
+ typedef std::map<CastStat, linked_ptr<SimpleHistogram> > HistogramMap;
typedef std::map<RtpTimestamp, FrameInfo> FrameInfoMap;
typedef std::map<
std::pair<RtpTimestamp, uint16>,
@@ -137,12 +181,20 @@ class StatsEventSubscriber : public RawEventSubscriber {
static const char* CastStatToString(CastStat stat);
+ void InitHistograms();
+
// Assigns |stats_map| with stats data. Used for testing.
void GetStatsInternal(StatsMap* stats_map) const;
+ void UpdateFirstLastEventTime(base::TimeTicks timestamp,
+ bool is_receiver_event);
bool GetReceiverOffset(base::TimeDelta* offset);
+ void MaybeInsertFrameInfo(RtpTimestamp rtp_timestamp,
+ const FrameInfo& frame_info);
void RecordFrameCaptureTime(const FrameEvent& frame_event);
- void MarkAsEncoded(RtpTimestamp rtp_timestamp);
+ void RecordCaptureLatency(const FrameEvent& frame_event);
+ void RecordEncodeLatency(const FrameEvent& frame_event);
+ void RecordFrameTxLatency(const FrameEvent& frame_event);
void RecordE2ELatency(const FrameEvent& frame_event);
void RecordPacketSentTime(const PacketEvent& packet_event);
void ErasePacketSentTime(const PacketEvent& packet_event);
@@ -189,13 +241,17 @@ class StatsEventSubscriber : public RawEventSubscriber {
int num_frames_late_;
// Fixed size map to record when recent frames were captured and other info.
- FrameInfoMap recent_captured_frames_;
+ FrameInfoMap recent_frame_infos_;
// Fixed size map to record when recent packets were sent.
PacketEventTimeMap packet_sent_times_;
// Sender time assigned on creation and |Reset()|.
base::TimeTicks start_time_;
+ base::TimeTicks first_event_time_;
+ base::TimeTicks last_event_time_;
+
+ HistogramMap histograms_;
base::ThreadChecker thread_checker_;
DISALLOW_COPY_AND_ASSIGN(StatsEventSubscriber);
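
The SimpleHistogram declared in this header documents its bucket layout (one underflow bucket, N fixed-width buckets, one overflow bucket), but the body of Add() is not part of this diff. A self-contained sketch of the bucket mapping that layout implies, assuming out-of-range samples land in the first and last buckets:

    #include <cstdint>
    #include <vector>

    // Sketch of the documented SimpleHistogram layout. For min=0, max=100,
    // width=10 this yields 12 counters: underflow, ten in-range, overflow.
    class HistogramSketch {
     public:
      HistogramSketch(int64_t min, int64_t max, int64_t width)
          : min_(min), max_(max), width_(width),
            buckets_((max - min) / width + 2, 0) {}

      void Add(int64_t sample) {
        size_t index;
        if (sample < min_) {
          index = 0;                    // Underflow bucket.
        } else if (sample >= max_) {
          index = buckets_.size() - 1;  // Overflow bucket.
        } else {
          index = 1 + (sample - min_) / width_;
        }
        buckets_[index]++;
      }

     private:
      int64_t min_;
      int64_t max_;
      int64_t width_;
      std::vector<int> buckets_;
    };
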
diff --git a/media/cast/logging/stats_event_subscriber_unittest.cc b/media/cast/logging/stats_event_subscriber_unittest.cc
index fe03bc6247..95616b1729 100644
--- a/media/cast/logging/stats_event_subscriber_unittest.cc
+++ b/media/cast/logging/stats_event_subscriber_unittest.cc
@@ -140,6 +140,9 @@ TEST_F(StatsEventSubscriberTest, Encode) {
uint32 frame_id = 0;
int num_frames = 10;
base::TimeTicks start_time = sender_clock_->NowTicks();
+ AdvanceClocks(base::TimeDelta::FromMicroseconds(35678));
+ base::TimeTicks first_event_time = sender_clock_->NowTicks();
+ base::TimeTicks last_event_time;
int total_size = 0;
for (int i = 0; i < num_frames; i++) {
int size = 1000 + base::RandInt(-100, 100);
@@ -152,6 +155,7 @@ TEST_F(StatsEventSubscriberTest, Encode) {
size,
true,
5678);
+ last_event_time = sender_clock_->NowTicks();
AdvanceClocks(base::TimeDelta::FromMicroseconds(35678));
rtp_timestamp += 90;
@@ -177,6 +181,20 @@ TEST_F(StatsEventSubscriberTest, Encode) {
EXPECT_DOUBLE_EQ(it->second,
static_cast<double>(total_size) / duration.InMillisecondsF() * 8);
+
+ it = stats_map.find(StatsEventSubscriber::FIRST_EVENT_TIME_MS);
+ ASSERT_TRUE(it != stats_map.end());
+
+ EXPECT_DOUBLE_EQ(
+ it->second,
+ (first_event_time - base::TimeTicks::UnixEpoch()).InMillisecondsF());
+
+ it = stats_map.find(StatsEventSubscriber::LAST_EVENT_TIME_MS);
+ ASSERT_TRUE(it != stats_map.end());
+
+ EXPECT_DOUBLE_EQ(
+ it->second,
+ (last_event_time - base::TimeTicks::UnixEpoch()).InMillisecondsF());
}
TEST_F(StatsEventSubscriberTest, Decode) {
diff --git a/media/cast/net/cast_transport_config.cc b/media/cast/net/cast_transport_config.cc
index b6c9a07aa6..2543f285f7 100644
--- a/media/cast/net/cast_transport_config.cc
+++ b/media/cast/net/cast_transport_config.cc
@@ -8,7 +8,7 @@ namespace media {
namespace cast {
CastTransportRtpConfig::CastTransportRtpConfig()
- : ssrc(0), feedback_ssrc(0), rtp_payload_type(0), stored_frames(0) {}
+ : ssrc(0), feedback_ssrc(0), rtp_payload_type(0) {}
CastTransportRtpConfig::~CastTransportRtpConfig() {}
diff --git a/media/cast/net/cast_transport_config.h b/media/cast/net/cast_transport_config.h
index dcfd133bd0..c5da103acf 100644
--- a/media/cast/net/cast_transport_config.h
+++ b/media/cast/net/cast_transport_config.h
@@ -41,10 +41,6 @@ struct CastTransportRtpConfig {
// RTP payload type enum: Specifies the type/encoding of frame data.
int rtp_payload_type;
- // The number of most-recent frames that must be stored in the transport
- // layer, to facilitate re-transmissions.
- int stored_frames;
-
// The AES crypto key and initialization vector. Each of these strings
// contains the data in binary form, of size kAesKeySize. If they are empty
// strings, crypto is not being used.
diff --git a/media/cast/net/cast_transport_sender.h b/media/cast/net/cast_transport_sender.h
index 9c75d12275..46031a886e 100644
--- a/media/cast/net/cast_transport_sender.h
+++ b/media/cast/net/cast_transport_sender.h
@@ -30,6 +30,10 @@
#include "media/cast/net/rtcp/rtcp_defines.h"
#include "net/base/ip_endpoint.h"
+namespace base {
+class DictionaryValue;
+} // namespace base
+
namespace net {
class NetLog;
} // namespace net
@@ -53,6 +57,7 @@ class CastTransportSender : public base::NonThreadSafe {
net::NetLog* net_log,
base::TickClock* clock,
const net::IPEndPoint& remote_end_point,
+ scoped_ptr<base::DictionaryValue> options,
const CastTransportStatusCallback& status_callback,
const BulkRawEventsCallback& raw_events_callback,
base::TimeDelta raw_events_callback_interval,
@@ -70,11 +75,9 @@ class CastTransportSender : public base::NonThreadSafe {
const RtcpCastMessageCallback& cast_message_cb,
const RtcpRttCallback& rtt_cb) = 0;
- // The following two functions handle the encoded media frames (audio and
- // video) to be processed.
- // Frames will be encrypted, packetized and transmitted to the network.
- virtual void InsertCodedAudioFrame(const EncodedFrame& audio_frame) = 0;
- virtual void InsertCodedVideoFrame(const EncodedFrame& video_frame) = 0;
+ // Encrypt, packetize and transmit |frame|. |ssrc| must refer to a
+ // channel already established with InitializeAudio / InitializeVideo.
+ virtual void InsertFrame(uint32 ssrc, const EncodedFrame& frame) = 0;
// Sends a RTCP sender report to the receiver.
// |ssrc| is the SSRC for this report.
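
The unified InsertFrame() above replaces the two per-media insert methods; the SSRC alone selects which initialized channel carries the frame. A hedged fragment, in the style of this header, showing the sender-side call pattern (the SSRC values are placeholders, not taken from this change):

    // Hypothetical SSRCs previously registered through InitializeAudio() and
    // InitializeVideo() on the same CastTransportSender.
    const uint32 kSketchAudioSsrc = 1;
    const uint32 kSketchVideoSsrc = 11;

    void SendEncodedFrames(CastTransportSender* transport,
                           const EncodedFrame& audio_frame,
                           const EncodedFrame& video_frame) {
      // The transport encrypts, packetizes and paces each frame; passing an
      // SSRC that was never initialized hits the NOTREACHED() path.
      transport->InsertFrame(kSketchAudioSsrc, audio_frame);
      transport->InsertFrame(kSketchVideoSsrc, video_frame);
    }
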
diff --git a/media/cast/net/cast_transport_sender_impl.cc b/media/cast/net/cast_transport_sender_impl.cc
index 478f75a39b..6c746f485b 100644
--- a/media/cast/net/cast_transport_sender_impl.cc
+++ b/media/cast/net/cast_transport_sender_impl.cc
@@ -5,6 +5,7 @@
#include "media/cast/net/cast_transport_sender_impl.h"
#include "base/single_thread_task_runner.h"
+#include "base/values.h"
#include "media/cast/net/cast_transport_config.h"
#include "media/cast/net/cast_transport_defines.h"
#include "media/cast/net/udp_transport.h"
@@ -13,10 +14,25 @@
namespace media {
namespace cast {
+namespace {
+int LookupOptionWithDefault(const base::DictionaryValue& options,
+ const std::string& path,
+ int default_value) {
+ int ret;
+ if (options.GetInteger(path, &ret)) {
+ return ret;
+ } else {
+ return default_value;
+ }
+}
+
+} // namespace
+
scoped_ptr<CastTransportSender> CastTransportSender::Create(
net::NetLog* net_log,
base::TickClock* clock,
const net::IPEndPoint& remote_end_point,
+ scoped_ptr<base::DictionaryValue> options,
const CastTransportStatusCallback& status_callback,
const BulkRawEventsCallback& raw_events_callback,
base::TimeDelta raw_events_callback_interval,
@@ -25,6 +41,7 @@ scoped_ptr<CastTransportSender> CastTransportSender::Create(
new CastTransportSenderImpl(net_log,
clock,
remote_end_point,
+ options.Pass(),
status_callback,
raw_events_callback,
raw_events_callback_interval,
@@ -40,6 +57,7 @@ CastTransportSenderImpl::CastTransportSenderImpl(
net::NetLog* net_log,
base::TickClock* clock,
const net::IPEndPoint& remote_end_point,
+ scoped_ptr<base::DictionaryValue> options,
const CastTransportStatusCallback& status_callback,
const BulkRawEventsCallback& raw_events_callback,
base::TimeDelta raw_events_callback_interval,
@@ -54,7 +72,13 @@ CastTransportSenderImpl::CastTransportSenderImpl(
net::IPEndPoint(),
remote_end_point,
status_callback)),
- pacer_(clock,
+ pacer_(LookupOptionWithDefault(*options.get(),
+ "pacer_target_burst_size",
+ kTargetBurstSize),
+ LookupOptionWithDefault(*options.get(),
+ "pacer_max_burst_size",
+ kMaxBurstSize),
+ clock,
&logging_,
external_transport ? external_transport : transport_.get(),
transport_task_runner),
@@ -74,12 +98,24 @@ CastTransportSenderImpl::CastTransportSenderImpl(
raw_events_callback_interval);
}
if (transport_) {
- // The default DSCP value for cast is AF41. Which gives it a higher
- // priority over other traffic.
- transport_->SetDscp(net::DSCP_AF41);
+ if (options->HasKey("DSCP")) {
+ // The default DSCP value for cast is AF41, which gives it a higher
+ // priority than other traffic.
+ transport_->SetDscp(net::DSCP_AF41);
+ }
transport_->StartReceiving(
base::Bind(&CastTransportSenderImpl::OnReceivedPacket,
weak_factory_.GetWeakPtr()));
+ int wifi_options = 0;
+ if (options->HasKey("disable_wifi_scan")) {
+ wifi_options |= net::WIFI_OPTIONS_DISABLE_SCAN;
+ }
+ if (options->HasKey("media_streaming_mode")) {
+ wifi_options |= net::WIFI_OPTIONS_MEDIA_STREAMING_MODE;
+ }
+ if (wifi_options) {
+ wifi_options_autoreset_ = net::SetWifiOptions(wifi_options);
+ }
}
}
@@ -178,16 +214,15 @@ void EncryptAndSendFrame(const EncodedFrame& frame,
}
} // namespace
-void CastTransportSenderImpl::InsertCodedAudioFrame(
- const EncodedFrame& audio_frame) {
- DCHECK(audio_sender_) << "Audio sender uninitialized";
- EncryptAndSendFrame(audio_frame, &audio_encryptor_, audio_sender_.get());
-}
-
-void CastTransportSenderImpl::InsertCodedVideoFrame(
- const EncodedFrame& video_frame) {
- DCHECK(video_sender_) << "Video sender uninitialized";
- EncryptAndSendFrame(video_frame, &video_encryptor_, video_sender_.get());
+void CastTransportSenderImpl::InsertFrame(uint32 ssrc,
+ const EncodedFrame& frame) {
+ if (audio_sender_ && ssrc == audio_sender_->ssrc()) {
+ EncryptAndSendFrame(frame, &audio_encryptor_, audio_sender_.get());
+ } else if (video_sender_ && ssrc == video_sender_->ssrc()) {
+ EncryptAndSendFrame(frame, &video_encryptor_, video_sender_.get());
+ } else {
+ NOTREACHED() << "Invalid InsertFrame call.";
+ }
}
void CastTransportSenderImpl::SendSenderReport(
@@ -223,12 +258,14 @@ void CastTransportSenderImpl::ResendFrameForKickstart(uint32 ssrc,
uint32 frame_id) {
if (audio_sender_ && ssrc == audio_sender_->ssrc()) {
DCHECK(audio_rtcp_session_);
- audio_sender_->ResendFrameForKickstart(frame_id,
- audio_rtcp_session_->rtt());
+ audio_sender_->ResendFrameForKickstart(
+ frame_id,
+ audio_rtcp_session_->current_round_trip_time());
} else if (video_sender_ && ssrc == video_sender_->ssrc()) {
DCHECK(video_rtcp_session_);
- video_sender_->ResendFrameForKickstart(frame_id,
- video_rtcp_session_->rtt());
+ video_sender_->ResendFrameForKickstart(
+ frame_id,
+ video_rtcp_session_->current_round_trip_time());
} else {
NOTREACHED() << "Invalid request for kickstart.";
}
@@ -339,7 +376,7 @@ void CastTransportSenderImpl::OnReceivedCastMessage(
last_byte_acked_for_audio_ =
std::max(acked_bytes, last_byte_acked_for_audio_);
} else if (video_sender_ && video_sender_->ssrc() == ssrc) {
- dedup_info.resend_interval = video_rtcp_session_->rtt();
+ dedup_info.resend_interval = video_rtcp_session_->current_round_trip_time();
// Only use audio stream to dedup if there is one.
if (audio_sender_) {
diff --git a/media/cast/net/cast_transport_sender_impl.h b/media/cast/net/cast_transport_sender_impl.h
index a9c92f7f47..cff5561e28 100644
--- a/media/cast/net/cast_transport_sender_impl.h
+++ b/media/cast/net/cast_transport_sender_impl.h
@@ -54,10 +54,21 @@ class CastTransportSenderImpl : public CastTransportSender {
// This can be a null callback, i.e. if user is not interested in raw events.
// |raw_events_callback_interval|: This can be |base::TimeDelta()| if
// |raw_events_callback| is a null callback.
+ // |options| contains optional settings for the transport, possible
+ // keys are:
+ // "DSCP" (value ignored) - turns DSCP on
+ // "pacer_target_burst_size": int - specifies how many packets to send
+ // per 10 ms ideally.
+ // "pacer_max_burst_size": int - specifies how many pakcets to send
+ // per 10 ms, max
+ // "disable_wifi_scan" (value ignored) - disable wifi scans while streaming
+ // "media_streaming_mode" (value ignored) - turn media streaming mode on
+ // Note: these options may be ignored on some platforms.
CastTransportSenderImpl(
net::NetLog* net_log,
base::TickClock* clock,
const net::IPEndPoint& remote_end_point,
+ scoped_ptr<base::DictionaryValue> options,
const CastTransportStatusCallback& status_callback,
const BulkRawEventsCallback& raw_events_callback,
base::TimeDelta raw_events_callback_interval,
@@ -72,8 +83,7 @@ class CastTransportSenderImpl : public CastTransportSender {
virtual void InitializeVideo(const CastTransportRtpConfig& config,
const RtcpCastMessageCallback& cast_message_cb,
const RtcpRttCallback& rtt_cb) OVERRIDE;
- virtual void InsertCodedAudioFrame(const EncodedFrame& audio_frame) OVERRIDE;
- virtual void InsertCodedVideoFrame(const EncodedFrame& video_frame) OVERRIDE;
+ virtual void InsertFrame(uint32 ssrc, const EncodedFrame& frame) OVERRIDE;
virtual void SendSenderReport(
uint32 ssrc,
@@ -158,6 +168,8 @@ class CastTransportSenderImpl : public CastTransportSender {
// audio packet.
int64 last_byte_acked_for_audio_;
+ scoped_ptr<net::ScopedWifiOptions> wifi_options_autoreset_;
+
base::WeakPtrFactory<CastTransportSenderImpl> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(CastTransportSenderImpl);
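
The option keys documented above are ordinary dictionary entries, read by LookupOptionWithDefault() and HasKey() in the implementation. A hedged fragment showing how a caller might populate them before construction, mirroring the pattern used in the unit test below; unknown keys are simply ignored, and absent integer keys fall back to kTargetBurstSize (10) and kMaxBurstSize (20) from paced_sender.h:

    scoped_ptr<base::DictionaryValue> options(new base::DictionaryValue);
    options->SetBoolean("DSCP", true);               // Value is ignored.
    options->SetBoolean("disable_wifi_scan", true);  // Value is ignored.
    options->SetBoolean("media_streaming_mode", true);
    options->SetInteger("pacer_target_burst_size", 20);
    options->SetInteger("pacer_max_burst_size", 100);
    // |options| is then handed off (options.Pass()) as the fourth argument of
    // CastTransportSender::Create() or the CastTransportSenderImpl constructor.
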
diff --git a/media/cast/net/cast_transport_sender_impl_unittest.cc b/media/cast/net/cast_transport_sender_impl_unittest.cc
index 1cbdf5293a..fa9ec471cf 100644
--- a/media/cast/net/cast_transport_sender_impl_unittest.cc
+++ b/media/cast/net/cast_transport_sender_impl_unittest.cc
@@ -9,6 +9,7 @@
#include "base/bind_helpers.h"
#include "base/memory/scoped_ptr.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/values.h"
#include "media/cast/cast_config.h"
#include "media/cast/net/cast_transport_config.h"
#include "media/cast/net/cast_transport_sender_impl.h"
@@ -47,7 +48,7 @@ class FakePacketSender : public PacketSender {
void SetPaused(bool paused) {
paused_ = paused;
- if (!paused && stored_packet_) {
+ if (!paused && stored_packet_.get()) {
SendPacket(stored_packet_, callback_);
callback_.Run();
}
@@ -81,6 +82,28 @@ class CastTransportSenderImplTest : public ::testing::Test {
new CastTransportSenderImpl(NULL,
&testing_clock_,
net::IPEndPoint(),
+ make_scoped_ptr(new base::DictionaryValue),
+ base::Bind(&UpdateCastTransportStatus),
+ BulkRawEventsCallback(),
+ base::TimeDelta(),
+ task_runner_,
+ &transport_));
+ task_runner_->RunTasks();
+ }
+
+ void InitWithOptions() {
+ scoped_ptr<base::DictionaryValue> options(
+ new base::DictionaryValue);
+ options->SetBoolean("DHCP", true);
+ options->SetBoolean("disable_wifi_scan", true);
+ options->SetBoolean("media_streaming_mode", true);
+ options->SetInteger("pacer_target_burst_size", 20);
+ options->SetInteger("pacer_max_burst_size", 100);
+ transport_sender_.reset(
+ new CastTransportSenderImpl(NULL,
+ &testing_clock_,
+ net::IPEndPoint(),
+ options.Pass(),
base::Bind(&UpdateCastTransportStatus),
BulkRawEventsCallback(),
base::TimeDelta(),
@@ -94,6 +117,7 @@ class CastTransportSenderImplTest : public ::testing::Test {
NULL,
&testing_clock_,
net::IPEndPoint(),
+ make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
base::Bind(&CastTransportSenderImplTest::LogRawEvents,
base::Unretained(this)),
@@ -108,7 +132,6 @@ class CastTransportSenderImplTest : public ::testing::Test {
rtp_config.ssrc = kVideoSsrc;
rtp_config.feedback_ssrc = 2;
rtp_config.rtp_payload_type = 3;
- rtp_config.stored_frames = 10;
transport_sender_->InitializeVideo(rtp_config,
RtcpCastMessageCallback(),
RtcpRttCallback());
@@ -119,7 +142,6 @@ class CastTransportSenderImplTest : public ::testing::Test {
rtp_config.ssrc = kAudioSsrc;
rtp_config.feedback_ssrc = 3;
rtp_config.rtp_payload_type = 4;
- rtp_config.stored_frames = 10;
transport_sender_->InitializeAudio(rtp_config,
RtcpCastMessageCallback(),
RtcpRttCallback());
@@ -152,6 +174,12 @@ TEST_F(CastTransportSenderImplTest, InitWithLogging) {
EXPECT_EQ(5, num_times_callback_called_);
}
+TEST_F(CastTransportSenderImplTest, InitWithOptions) {
+ InitWithOptions();
+ task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ EXPECT_EQ(0, num_times_callback_called_);
+}
+
TEST_F(CastTransportSenderImplTest, NacksCancelRetransmits) {
InitWithoutLogging();
InitializeVideo();
@@ -164,7 +192,7 @@ TEST_F(CastTransportSenderImplTest, NacksCancelRetransmits) {
fake_frame.dependency = EncodedFrame::KEY;
fake_frame.data.resize(5000, ' ');
- transport_sender_->InsertCodedVideoFrame(fake_frame);
+ transport_sender_->InsertFrame(kVideoSsrc, fake_frame);
task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
EXPECT_EQ(4, transport_.packets_sent());
@@ -209,7 +237,7 @@ TEST_F(CastTransportSenderImplTest, CancelRetransmits) {
fake_frame.dependency = EncodedFrame::KEY;
fake_frame.data.resize(5000, ' ');
- transport_sender_->InsertCodedVideoFrame(fake_frame);
+ transport_sender_->InsertFrame(kVideoSsrc, fake_frame);
task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
EXPECT_EQ(4, transport_.packets_sent());
@@ -248,7 +276,7 @@ TEST_F(CastTransportSenderImplTest, Kickstart) {
fake_frame.data.resize(5000, ' ');
transport_.SetPaused(true);
- transport_sender_->InsertCodedVideoFrame(fake_frame);
+ transport_sender_->InsertFrame(kVideoSsrc, fake_frame);
transport_sender_->ResendFrameForKickstart(kVideoSsrc, 1);
transport_.SetPaused(false);
task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
@@ -285,11 +313,11 @@ TEST_F(CastTransportSenderImplTest, DedupRetransmissionWithAudio) {
fake_audio.reference_time = testing_clock_.NowTicks();
fake_audio.dependency = EncodedFrame::KEY;
fake_audio.data.resize(100, ' ');
- transport_sender_->InsertCodedAudioFrame(fake_audio);
+ transport_sender_->InsertFrame(kAudioSsrc, fake_audio);
task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
fake_audio.frame_id = 2;
fake_audio.reference_time = testing_clock_.NowTicks();
- transport_sender_->InsertCodedAudioFrame(fake_audio);
+ transport_sender_->InsertFrame(kAudioSsrc, fake_audio);
task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
EXPECT_EQ(2, transport_.packets_sent());
@@ -308,7 +336,7 @@ TEST_F(CastTransportSenderImplTest, DedupRetransmissionWithAudio) {
fake_video.frame_id = 1;
fake_video.dependency = EncodedFrame::KEY;
fake_video.data.resize(5000, ' ');
- transport_sender_->InsertCodedVideoFrame(fake_video);
+ transport_sender_->InsertFrame(kVideoSsrc, fake_video);
task_runner_->RunTasks();
EXPECT_EQ(6, transport_.packets_sent());
diff --git a/media/cast/net/pacing/paced_sender.cc b/media/cast/net/pacing/paced_sender.cc
index b83dc0f1c8..6b39f07408 100644
--- a/media/cast/net/pacing/paced_sender.cc
+++ b/media/cast/net/pacing/paced_sender.cc
@@ -6,6 +6,7 @@
#include "base/big_endian.h"
#include "base/bind.h"
+#include "base/debug/dump_without_crashing.h"
#include "base/message_loop/message_loop.h"
#include "media/cast/logging/logging_impl.h"
@@ -18,14 +19,13 @@ static const int64 kPacingIntervalMs = 10;
// Each frame will be split into no more than kPacingMaxBurstsPerFrame
// bursts of packets.
static const size_t kPacingMaxBurstsPerFrame = 3;
-static const size_t kTargetBurstSize = 10;
-static const size_t kMaxBurstSize = 20;
static const size_t kMaxDedupeWindowMs = 500;
-// Number of packets that we keep the information of sent time and sent bytes.
-// This number allows 0.5 seconds of history if sending at maximum rate.
-static const size_t kPacketHistorySize =
- kMaxBurstSize * kMaxDedupeWindowMs / kPacingIntervalMs;
+// "Impossible" upper-bound on the maximum number of packets that should ever be
+// enqueued in the pacer. This is used to detect bugs, reported as crash dumps.
+static const size_t kHugeQueueLengthSeconds = 10;
+static const size_t kRidiculousNumberOfPackets =
+ kHugeQueueLengthSeconds * (kMaxBurstSize * 1000 / kPacingIntervalMs);
} // namespace
@@ -42,6 +42,8 @@ PacedSender::PacketSendRecord::PacketSendRecord()
: last_byte_sent(0), last_byte_sent_for_audio(0) {}
PacedSender::PacedSender(
+ size_t target_burst_size,
+ size_t max_burst_size,
base::TickClock* clock,
LoggingImpl* logging,
PacketSender* transport,
@@ -52,11 +54,14 @@ PacedSender::PacedSender(
transport_task_runner_(transport_task_runner),
audio_ssrc_(0),
video_ssrc_(0),
- max_burst_size_(kTargetBurstSize),
- next_max_burst_size_(kTargetBurstSize),
- next_next_max_burst_size_(kTargetBurstSize),
+ target_burst_size_(target_burst_size),
+ max_burst_size_(max_burst_size),
+ current_max_burst_size_(target_burst_size_),
+ next_max_burst_size_(target_burst_size_),
+ next_next_max_burst_size_(target_burst_size_),
current_burst_size_(0),
state_(State_Unblocked),
+ has_reached_upper_bound_once_(false),
weak_factory_(this) {
}
@@ -225,6 +230,15 @@ void PacedSender::SendStoredPackets() {
return;
}
+ // If the queue ever becomes impossibly long, send a crash dump without
+ // actually crashing the process.
+ if (size() > kRidiculousNumberOfPackets && !has_reached_upper_bound_once_) {
+ NOTREACHED();
+ // Please use Cr=Internals-Cast label in bug reports:
+ base::debug::DumpWithoutCrashing();
+ has_reached_upper_bound_once_ = true;
+ }
+
base::TimeTicks now = clock_->NowTicks();
// I don't actually trust that PostDelayTask(x - now) will mean that
// now >= x when the call happens, so check if the previous state was
@@ -245,17 +259,9 @@ void PacedSender::SendStoredPackets() {
// which is more bandwidth than the cast library should need, and sending
// out more data per second is unlikely to be helpful.
size_t max_burst_size = std::min(
- kMaxBurstSize,
- std::max(kTargetBurstSize, size() / kPacingMaxBurstsPerFrame));
-
- // If the queue is long, issue a warning. Try to limit the number of
- // warnings issued by only issuing the warning when the burst size
- // grows. Otherwise we might get 100 warnings per second.
- if (max_burst_size > next_next_max_burst_size_ && size() > 100) {
- LOG(WARNING) << "Packet queue is very long:" << size();
- }
-
- max_burst_size_ = std::max(next_max_burst_size_, max_burst_size);
+ max_burst_size_,
+ std::max(target_burst_size_, size() / kPacingMaxBurstsPerFrame));
+ current_max_burst_size_ = std::max(next_max_burst_size_, max_burst_size);
next_max_burst_size_ = std::max(next_next_max_burst_size_, max_burst_size);
next_next_max_burst_size_ = max_burst_size;
}
@@ -263,7 +269,7 @@ void PacedSender::SendStoredPackets() {
base::Closure cb = base::Bind(&PacedSender::SendStoredPackets,
weak_factory_.GetWeakPtr());
while (!empty()) {
- if (current_burst_size_ >= max_burst_size_) {
+ if (current_burst_size_ >= current_max_burst_size_) {
transport_task_runner_->PostDelayedTask(FROM_HERE,
cb,
burst_end_ - now);
@@ -304,11 +310,13 @@ void PacedSender::SendStoredPackets() {
}
// Keep ~0.5 seconds of data (1000 packets).
- if (send_history_buffer_.size() >= kPacketHistorySize) {
+ if (send_history_buffer_.size() >=
+ max_burst_size_ * kMaxDedupeWindowMs / kPacingIntervalMs) {
send_history_.swap(send_history_buffer_);
send_history_buffer_.clear();
}
- DCHECK_LE(send_history_buffer_.size(), kPacketHistorySize);
+ DCHECK_LE(send_history_buffer_.size(),
+ max_burst_size_ * kMaxDedupeWindowMs / kPacingIntervalMs);
state_ = State_Unblocked;
}
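
With the defaults now declared in paced_sender.h, the new upper bound and the resized send-history buffer come out to concrete numbers. A standalone check of that arithmetic, assuming the default burst sizes:

    #include <cassert>
    #include <cstddef>

    int main() {
      const size_t kPacingIntervalMs = 10;
      const size_t kMaxDedupeWindowMs = 500;
      const size_t kMaxBurstSize = 20;           // Default from paced_sender.h.
      const size_t kHugeQueueLengthSeconds = 10;

      // Queue length that triggers the DumpWithoutCrashing() report:
      // 10 s * (20 packets per 10 ms) = 20000 packets.
      const size_t kRidiculousNumberOfPackets =
          kHugeQueueLengthSeconds * (kMaxBurstSize * 1000 / kPacingIntervalMs);
      assert(kRidiculousNumberOfPackets == 20000u);

      // Send-history buffer: ~0.5 s of packets at the maximum send rate,
      // i.e. the "1000 packets" the comment refers to under the defaults.
      const size_t history_size =
          kMaxBurstSize * kMaxDedupeWindowMs / kPacingIntervalMs;
      assert(history_size == 1000u);
      return 0;
    }
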
diff --git a/media/cast/net/pacing/paced_sender.h b/media/cast/net/pacing/paced_sender.h
index 8e5a60366a..66cc6ec462 100644
--- a/media/cast/net/pacing/paced_sender.h
+++ b/media/cast/net/pacing/paced_sender.h
@@ -22,6 +22,10 @@
namespace media {
namespace cast {
+// Meant to be used as defaults for pacer construction.
+static const size_t kTargetBurstSize = 10;
+static const size_t kMaxBurstSize = 20;
+
class LoggingImpl;
// Use std::pair for free comparison operators.
@@ -78,6 +82,8 @@ class PacedSender : public PacedPacketSender,
// The |external_transport| should only be used by the Cast receiver and for
// testing.
PacedSender(
+ size_t target_burst_size, // Should normally be kTargetBurstSize.
+ size_t max_burst_size, // Should normally be kMaxBurstSize.
base::TickClock* clock,
LoggingImpl* logging,
PacketSender* external_transport,
@@ -185,8 +191,11 @@ class PacedSender : public PacedPacketSender,
// Records the last byte sent for payload with a specific SSRC.
std::map<uint32, int64> last_byte_sent_;
- // Maximum burst size for the next three bursts.
+ size_t target_burst_size_;
size_t max_burst_size_;
+
+ // Maximum burst size for the next three bursts.
+ size_t current_max_burst_size_;
size_t next_max_burst_size_;
size_t next_next_max_burst_size_;
// Number of packets already sent in the current burst.
@@ -196,6 +205,8 @@ class PacedSender : public PacedPacketSender,
State state_;
+ bool has_reached_upper_bound_once_;
+
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<PacedSender> weak_factory_;
diff --git a/media/cast/net/pacing/paced_sender_unittest.cc b/media/cast/net/pacing/paced_sender_unittest.cc
index 68b8c06fcc..e1fa5586d1 100644
--- a/media/cast/net/pacing/paced_sender_unittest.cc
+++ b/media/cast/net/pacing/paced_sender_unittest.cc
@@ -65,8 +65,12 @@ class PacedSenderTest : public ::testing::Test {
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kStartMillisecond));
task_runner_ = new test::FakeSingleThreadTaskRunner(&testing_clock_);
- paced_sender_.reset(new PacedSender(
- &testing_clock_, &logging_, &mock_transport_, task_runner_));
+ paced_sender_.reset(new PacedSender(kTargetBurstSize,
+ kMaxBurstSize,
+ &testing_clock_,
+ &logging_,
+ &mock_transport_,
+ task_runner_));
paced_sender_->RegisterAudioSsrc(kAudioSsrc);
paced_sender_->RegisterVideoSsrc(kVideoSsrc);
}
diff --git a/media/cast/net/rtcp/rtcp.cc b/media/cast/net/rtcp/rtcp.cc
index 6b7718dde9..77be988dac 100644
--- a/media/cast/net/rtcp/rtcp.cc
+++ b/media/cast/net/rtcp/rtcp.cc
@@ -8,8 +8,9 @@
#include "media/cast/cast_defines.h"
#include "media/cast/cast_environment.h"
#include "media/cast/net/cast_transport_defines.h"
+#include "media/cast/net/pacing/paced_sender.h"
+#include "media/cast/net/rtcp/rtcp_builder.h"
#include "media/cast/net/rtcp/rtcp_defines.h"
-#include "media/cast/net/rtcp/rtcp_sender.h"
#include "media/cast/net/rtcp/rtcp_utility.h"
using base::TimeDelta;
@@ -17,7 +18,7 @@ using base::TimeDelta;
namespace media {
namespace cast {
-static const int32 kMaxRttMs = 10000; // 10 seconds.
+static const int32 kStatsHistoryWindowMs = 10000; // 10 seconds.
// Reject packets that are more than 0.5 seconds older than
// the newest packet we've seen so far. This protects internal
// state from crazy routers. (Based on RRTR)
@@ -64,15 +65,14 @@ Rtcp::Rtcp(const RtcpCastMessageCallback& cast_callback,
rtt_callback_(rtt_callback),
log_callback_(log_callback),
clock_(clock),
- rtcp_sender_(new RtcpSender(packet_sender, local_ssrc)),
+ rtcp_builder_(local_ssrc),
+ packet_sender_(packet_sender),
local_ssrc_(local_ssrc),
remote_ssrc_(remote_ssrc),
last_report_truncated_ntp_(0),
local_clock_ahead_by_(ClockDriftSmoother::GetDefaultTimeConstant()),
lip_sync_rtp_timestamp_(0),
- lip_sync_ntp_timestamp_(0),
- min_rtt_(TimeDelta::FromMilliseconds(kMaxRttMs)),
- number_of_rtt_in_avg_(0) {
+ lip_sync_ntp_timestamp_(0) {
}
Rtcp::~Rtcp() {}
@@ -209,11 +209,9 @@ void Rtcp::SendRtcpFromRtpReceiver(
if (rtp_receiver_statistics) {
report_block.remote_ssrc = 0; // Not needed to set send side.
report_block.media_ssrc = remote_ssrc_; // SSRC of the RTP packet sender.
- if (rtp_receiver_statistics) {
- rtp_receiver_statistics->GetStatistics(
- &report_block.fraction_lost, &report_block.cumulative_lost,
- &report_block.extended_high_sequence_number, &report_block.jitter);
- }
+ rtp_receiver_statistics->GetStatistics(
+ &report_block.fraction_lost, &report_block.cumulative_lost,
+ &report_block.extended_high_sequence_number, &report_block.jitter);
report_block.last_sr = last_report_truncated_ntp_;
if (!time_last_report_received_.is_null()) {
@@ -228,12 +226,14 @@ void Rtcp::SendRtcpFromRtpReceiver(
report_block.delay_since_last_sr = 0;
}
}
- rtcp_sender_->SendRtcpFromRtpReceiver(
- rtp_receiver_statistics ? &report_block : NULL,
- &rrtr,
- cast_message,
- rtcp_events,
- target_delay);
+ packet_sender_->SendRtcpPacket(
+ local_ssrc_,
+ rtcp_builder_.BuildRtcpFromReceiver(
+ rtp_receiver_statistics ? &report_block : NULL,
+ &rrtr,
+ cast_message,
+ rtcp_events,
+ target_delay));
}
void Rtcp::SendRtcpFromRtpSender(base::TimeTicks current_time,
@@ -254,7 +254,9 @@ void Rtcp::SendRtcpFromRtpSender(base::TimeTicks current_time,
sender_info.send_packet_count = send_packet_count;
sender_info.send_octet_count = send_octet_count;
- rtcp_sender_->SendRtcpFromRtpSender(sender_info);
+ packet_sender_->SendRtcpPacket(
+ local_ssrc_,
+ rtcp_builder_.BuildRtcpFromSender(sender_info));
}
void Rtcp::OnReceivedNtp(uint32 ntp_seconds, uint32 ntp_fraction) {
@@ -265,8 +267,9 @@ void Rtcp::OnReceivedNtp(uint32 ntp_seconds, uint32 ntp_fraction) {
// TODO(miu): This clock offset calculation does not account for packet
// transit time over the network. End2EndTest.EvilNetwork confirms that this
- // contributes a very significant source of error here. Fix this along with
- // the RTT clean-up.
+ // contributes a very significant source of error here. Determine whether
+ // RTT should be factored-in, and how that changes the rest of the
+ // calculation.
const base::TimeDelta measured_offset =
now - ConvertNtpToTimeTicks(ntp_seconds, ntp_fraction);
local_clock_ahead_by_.Update(now, measured_offset);
@@ -320,8 +323,20 @@ void Rtcp::OnReceivedDelaySinceLastReport(uint32 last_report,
return; // Feedback on another report.
}
- base::TimeDelta sender_delay = clock_->NowTicks() - it->second;
- UpdateRtt(sender_delay, ConvertFromNtpDiff(delay_since_last_report));
+ const base::TimeDelta sender_delay = clock_->NowTicks() - it->second;
+ const base::TimeDelta receiver_delay =
+ ConvertFromNtpDiff(delay_since_last_report);
+ current_round_trip_time_ = sender_delay - receiver_delay;
+ // If the round trip time was computed as less than 1 ms, assume clock
+ // imprecision by one or both peers caused a bad value to be calculated.
+ // While plenty of networks do easily achieve less than 1 ms round trip time,
+ // such a level of precision cannot be measured with our approach; and 1 ms is
+ // good enough to represent "under 1 ms" for our use cases.
+ current_round_trip_time_ =
+ std::max(current_round_trip_time_, base::TimeDelta::FromMilliseconds(1));
+
+ if (!rtt_callback_.is_null())
+ rtt_callback_.Run(current_round_trip_time_);
}
void Rtcp::OnReceivedCastFeedback(const RtcpCastMessage& cast_message) {
@@ -342,7 +357,8 @@ void Rtcp::SaveLastSentNtpTime(const base::TimeTicks& now,
last_reports_sent_map_[last_report] = now;
last_reports_sent_queue_.push(std::make_pair(last_report, now));
- base::TimeTicks timeout = now - TimeDelta::FromMilliseconds(kMaxRttMs);
+ const base::TimeTicks timeout =
+ now - TimeDelta::FromMilliseconds(kStatsHistoryWindowMs);
// Cleanup old statistics older than |timeout|.
while (!last_reports_sent_queue_.empty()) {
@@ -356,48 +372,6 @@ void Rtcp::SaveLastSentNtpTime(const base::TimeTicks& now,
}
}
-void Rtcp::UpdateRtt(const base::TimeDelta& sender_delay,
- const base::TimeDelta& receiver_delay) {
- base::TimeDelta rtt = sender_delay - receiver_delay;
- // TODO(miu): Find out why this must be >= 1 ms, and remove the fudge if it's
- // bogus.
- rtt = std::max(rtt, base::TimeDelta::FromMilliseconds(1));
- rtt_ = rtt;
- min_rtt_ = std::min(min_rtt_, rtt);
- max_rtt_ = std::max(max_rtt_, rtt);
-
- // TODO(miu): Replace "average for all time" with an EWMA, or suitable
- // "average over recent past" mechanism.
- if (number_of_rtt_in_avg_ != 0) {
- // Integer math equivalent of (ac/(ac+1.0))*avg_rtt_ + (1.0/(ac+1.0))*rtt).
- // (TimeDelta only supports math with other TimeDeltas and int64s.)
- avg_rtt_ = (avg_rtt_ * number_of_rtt_in_avg_ + rtt) /
- (number_of_rtt_in_avg_ + 1);
- } else {
- avg_rtt_ = rtt;
- }
- number_of_rtt_in_avg_++;
-
- if (!rtt_callback_.is_null())
- rtt_callback_.Run(rtt, avg_rtt_, min_rtt_, max_rtt_);
-}
-
-bool Rtcp::Rtt(base::TimeDelta* rtt, base::TimeDelta* avg_rtt,
- base::TimeDelta* min_rtt, base::TimeDelta* max_rtt) const {
- DCHECK(rtt) << "Invalid argument";
- DCHECK(avg_rtt) << "Invalid argument";
- DCHECK(min_rtt) << "Invalid argument";
- DCHECK(max_rtt) << "Invalid argument";
-
- if (number_of_rtt_in_avg_ == 0) return false;
-
- *rtt = rtt_;
- *avg_rtt = avg_rtt_;
- *min_rtt = min_rtt_;
- *max_rtt = max_rtt_;
- return true;
-}
-
void Rtcp::OnReceivedReceiverLog(const RtcpReceiverLogMessage& receiver_log) {
if (log_callback_.is_null())
return;
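
The replacement RTT bookkeeping above reduces to a single subtraction with a 1 ms floor; the min/avg/max history is gone. A minimal sketch of the computation in OnReceivedDelaySinceLastReport(), with std::chrono standing in for base::TimeDelta:

    #include <algorithm>
    #include <chrono>

    // |sender_delay| is the time between sending our report and receiving the
    // reply; |receiver_delay| is the peer's reported delay since our last
    // report. The difference is the round trip time, clamped to 1 ms because
    // sub-millisecond values cannot be measured reliably with this approach.
    std::chrono::milliseconds CurrentRoundTripTime(
        std::chrono::milliseconds sender_delay,
        std::chrono::milliseconds receiver_delay) {
      return std::max(sender_delay - receiver_delay,
                      std::chrono::milliseconds(1));
    }
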
diff --git a/media/cast/net/rtcp/rtcp.h b/media/cast/net/rtcp/rtcp.h
index a1fdc9e489..6b3208425c 100644
--- a/media/cast/net/rtcp/rtcp.h
+++ b/media/cast/net/rtcp/rtcp.h
@@ -23,6 +23,7 @@
#include "media/cast/net/cast_transport_defines.h"
#include "media/cast/net/cast_transport_sender.h"
#include "media/cast/net/rtcp/receiver_rtcp_event_subscriber.h"
+#include "media/cast/net/rtcp/rtcp_builder.h"
#include "media/cast/net/rtcp/rtcp_defines.h"
namespace media {
@@ -31,7 +32,7 @@ namespace cast {
class LocalRtcpReceiverFeedback;
class PacedPacketSender;
class RtcpReceiver;
-class RtcpSender;
+class RtcpBuilder;
typedef std::pair<uint32, base::TimeTicks> RtcpSendTimePair;
typedef std::map<uint32, base::TimeTicks> RtcpSendTimeMap;
@@ -88,15 +89,6 @@ class Rtcp {
// this session, e.g. SSRC doesn't match.
bool IncomingRtcpPacket(const uint8* data, size_t length);
- // TODO(miu): Clean up this method and downstream code: Only VideoSender uses
- // this (for congestion control), and only the |rtt| and |avg_rtt| values, and
- // it's not clear that any of the downstream code is doing the right thing
- // with this data.
- bool Rtt(base::TimeDelta* rtt,
- base::TimeDelta* avg_rtt,
- base::TimeDelta* min_rtt,
- base::TimeDelta* max_rtt) const;
-
// If available, returns true and sets the output arguments to the latest
// lip-sync timestamps gleaned from the sender reports. While the sender
// provides reference NTP times relative to its own wall clock, the
@@ -107,9 +99,13 @@ class Rtcp {
void OnReceivedReceiverLog(const RtcpReceiverLogMessage& receiver_log);
+ // If greater than zero, this is the last measured network round trip time.
+ base::TimeDelta current_round_trip_time() const {
+ return current_round_trip_time_;
+ }
+
static bool IsRtcpPacket(const uint8* packet, size_t length);
static uint32 GetSsrcOfSender(const uint8* rtcp_buffer, size_t length);
- const base::TimeDelta& rtt() const { return rtt_; }
protected:
void OnReceivedNtp(uint32 ntp_seconds, uint32 ntp_fraction);
@@ -123,9 +119,6 @@ class Rtcp {
void OnReceivedCastFeedback(const RtcpCastMessage& cast_message);
- void UpdateRtt(const base::TimeDelta& sender_delay,
- const base::TimeDelta& receiver_delay);
-
void SaveLastSentNtpTime(const base::TimeTicks& now,
uint32 last_ntp_seconds,
uint32 last_ntp_fraction);
@@ -138,7 +131,8 @@ class Rtcp {
const RtcpRttCallback rtt_callback_;
const RtcpLogMessageCallback log_callback_;
base::TickClock* const clock_; // Not owned by this class.
- const scoped_ptr<RtcpSender> rtcp_sender_;
+ RtcpBuilder rtcp_builder_;
+ PacedPacketSender* packet_sender_; // Not owned.
const uint32 local_ssrc_;
const uint32 remote_ssrc_;
@@ -165,11 +159,10 @@ class Rtcp {
uint32 lip_sync_rtp_timestamp_;
uint64 lip_sync_ntp_timestamp_;
- base::TimeDelta rtt_;
- base::TimeDelta min_rtt_;
- base::TimeDelta max_rtt_;
- int number_of_rtt_in_avg_;
- base::TimeDelta avg_rtt_;
+ // The last measured network round trip time. This is updated with each
+ // sender report --> receiver report round trip. If this is zero, then the
+ // round trip time has not been measured yet.
+ base::TimeDelta current_round_trip_time_;
base::TimeTicks largest_seen_timestamp_;
diff --git a/media/cast/net/rtcp/rtcp_sender.cc b/media/cast/net/rtcp/rtcp_builder.cc
index 4ca6eb581b..b4e58c3d36 100644
--- a/media/cast/net/rtcp/rtcp_sender.cc
+++ b/media/cast/net/rtcp/rtcp_builder.cc
@@ -2,17 +2,15 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/cast/net/rtcp/rtcp_sender.h"
+#include "media/cast/net/rtcp/rtcp_builder.h"
#include <stdint.h>
#include <algorithm>
#include <vector>
-#include "base/big_endian.h"
#include "base/logging.h"
#include "media/cast/net/cast_transport_defines.h"
-#include "media/cast/net/pacing/paced_sender.h"
#include "media/cast/net/rtcp/rtcp_defines.h"
#include "media/cast/net/rtcp/rtcp_utility.h"
@@ -46,7 +44,7 @@ bool EventTimestampLessThan(const RtcpReceiverEventLogMessage& lhs,
return lhs.event_timestamp < rhs.event_timestamp;
}
-void AddReceiverLog(
+void AddReceiverLogEntries(
const RtcpReceiverLogMessage& redundancy_receiver_log_message,
RtcpReceiverLogMessage* receiver_log_message,
size_t* remaining_space,
@@ -67,7 +65,7 @@ void AddReceiverLog(
*remaining_space -= kRtcpReceiverFrameLogSize +
event_log_messages.size() * kRtcpReceiverEventLogSize;
- ++*number_of_frames;
+ ++*number_of_frames;
*total_number_of_messages_to_send += event_log_messages.size();
++it;
}
@@ -145,161 +143,139 @@ class NackStringBuilder {
};
} // namespace
-RtcpSender::RtcpSender(PacedPacketSender* outgoing_transport,
- uint32 sending_ssrc)
- : ssrc_(sending_ssrc),
- transport_(outgoing_transport) {
+RtcpBuilder::RtcpBuilder(uint32 sending_ssrc)
+ : writer_(NULL, 0),
+ ssrc_(sending_ssrc),
+ ptr_of_length_(NULL) {
}
-RtcpSender::~RtcpSender() {}
+RtcpBuilder::~RtcpBuilder() {}
+
+void RtcpBuilder::PatchLengthField() {
+ if (ptr_of_length_) {
+ // Back-patch the packet length. The client must have taken
+ // care of proper padding to 32-bit words.
+ int this_packet_length = (writer_.ptr() - ptr_of_length_ - 2);
+ DCHECK_EQ(0, this_packet_length % 4)
+ << "Packets must be a multiple of 32 bits long";
+ *ptr_of_length_ = this_packet_length >> 10;
+ *(ptr_of_length_ + 1) = (this_packet_length >> 2) & 0xFF;
+ ptr_of_length_ = NULL;
+ }
+}
+
+// Set the 5-bit value in the 1st byte of the header
+// and the payload type. Set aside room for the length field,
+// and make provision for back-patching it.
+void RtcpBuilder::AddRtcpHeader(RtcpPacketFields payload, int format_or_count) {
+ PatchLengthField();
+ writer_.WriteU8(0x80 | (format_or_count & 0x1F));
+ writer_.WriteU8(payload);
+ ptr_of_length_ = writer_.ptr();
+
+ // Initialize length to "clearly illegal".
+ writer_.WriteU16(0xDEAD);
+}
+
+void RtcpBuilder::Start() {
+ packet_ = new base::RefCountedData<Packet>;
+ packet_->data.resize(kMaxIpPacketSize);
+ writer_ = base::BigEndianWriter(
+ reinterpret_cast<char*>(&(packet_->data[0])), kMaxIpPacketSize);
+}
-void RtcpSender::SendRtcpFromRtpReceiver(
+PacketRef RtcpBuilder::Finish() {
+ PatchLengthField();
+ packet_->data.resize(kMaxIpPacketSize - writer_.remaining());
+ writer_ = base::BigEndianWriter(NULL, 0);
+ PacketRef ret = packet_;
+ packet_ = NULL;
+ return ret;
+}
+
+PacketRef RtcpBuilder::BuildRtcpFromReceiver(
const RtcpReportBlock* report_block,
const RtcpReceiverReferenceTimeReport* rrtr,
const RtcpCastMessage* cast_message,
const ReceiverRtcpEventSubscriber::RtcpEventMultiMap* rtcp_events,
base::TimeDelta target_delay) {
- PacketRef packet(new base::RefCountedData<Packet>);
- packet->data.reserve(kMaxIpPacketSize);
+ Start();
+
if (report_block)
- BuildRR(report_block, &packet->data);
+ AddRR(report_block);
if (rrtr)
- BuildRrtr(rrtr, &packet->data);
+ AddRrtr(rrtr);
if (cast_message)
- BuildCast(cast_message, target_delay, &packet->data);
+ AddCast(cast_message, target_delay);
if (rtcp_events)
- BuildReceiverLog(*rtcp_events, &packet->data);
-
- if (packet->data.empty()) {
- NOTREACHED() << "Empty packet.";
- return; // Sanity don't send empty packets.
- }
+ AddReceiverLog(*rtcp_events);
- transport_->SendRtcpPacket(ssrc_, packet);
+ return Finish();
}
-void RtcpSender::SendRtcpFromRtpSender(
- const RtcpSenderInfo& sender_info) {
- PacketRef packet(new base::RefCountedData<Packet>);
- packet->data.reserve(kMaxIpPacketSize);
- BuildSR(sender_info, &packet->data);
-
- if (packet->data.empty()) {
- NOTREACHED() << "Empty packet.";
- return; // Sanity - don't send empty packets.
- }
-
- transport_->SendRtcpPacket(ssrc_, packet);
+PacketRef RtcpBuilder::BuildRtcpFromSender(const RtcpSenderInfo& sender_info) {
+ Start();
+ AddSR(sender_info);
+ return Finish();
}
-void RtcpSender::BuildRR(const RtcpReportBlock* report_block,
- Packet* packet) const {
- size_t start_size = packet->size();
- DCHECK_LT(start_size + 32, kMaxIpPacketSize) << "Not enough buffer space";
- if (start_size + 32 > kMaxIpPacketSize)
- return;
-
- uint16 number_of_rows = (report_block) ? 7 : 1;
- packet->resize(start_size + 8);
-
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 8);
- big_endian_writer.WriteU8(0x80 + (report_block ? 1 : 0));
- big_endian_writer.WriteU8(kPacketTypeReceiverReport);
- big_endian_writer.WriteU16(number_of_rows);
- big_endian_writer.WriteU32(ssrc_);
-
+void RtcpBuilder::AddRR(const RtcpReportBlock* report_block) {
+ AddRtcpHeader(kPacketTypeReceiverReport, report_block ? 1 : 0);
+ writer_.WriteU32(ssrc_);
if (report_block) {
- AddReportBlocks(*report_block, packet); // Adds 24 bytes.
+ AddReportBlocks(*report_block); // Adds 24 bytes.
}
}
-void RtcpSender::AddReportBlocks(const RtcpReportBlock& report_block,
- Packet* packet) const {
- size_t start_size = packet->size();
- DCHECK_LT(start_size + 24, kMaxIpPacketSize) << "Not enough buffer space";
- if (start_size + 24 > kMaxIpPacketSize)
- return;
-
- packet->resize(start_size + 24);
-
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 24);
- big_endian_writer.WriteU32(report_block.media_ssrc);
- big_endian_writer.WriteU8(report_block.fraction_lost);
- big_endian_writer.WriteU8(report_block.cumulative_lost >> 16);
- big_endian_writer.WriteU8(report_block.cumulative_lost >> 8);
- big_endian_writer.WriteU8(report_block.cumulative_lost);
+void RtcpBuilder::AddReportBlocks(const RtcpReportBlock& report_block) {
+ writer_.WriteU32(report_block.media_ssrc);
+ writer_.WriteU8(report_block.fraction_lost);
+ writer_.WriteU8(report_block.cumulative_lost >> 16);
+ writer_.WriteU8(report_block.cumulative_lost >> 8);
+ writer_.WriteU8(report_block.cumulative_lost);
// Extended highest seq_no: the highest sequence number received.
- big_endian_writer.WriteU32(report_block.extended_high_sequence_number);
- big_endian_writer.WriteU32(report_block.jitter);
+ writer_.WriteU32(report_block.extended_high_sequence_number);
+ writer_.WriteU32(report_block.jitter);
// Last SR timestamp; our NTP time when we received the last report.
// This is the value that we read from the send report packet not when we
// received it.
- big_endian_writer.WriteU32(report_block.last_sr);
+ writer_.WriteU32(report_block.last_sr);
// Delay since last received report, time since we received the report.
- big_endian_writer.WriteU32(report_block.delay_since_last_sr);
+ writer_.WriteU32(report_block.delay_since_last_sr);
}
-void RtcpSender::BuildRrtr(const RtcpReceiverReferenceTimeReport* rrtr,
- Packet* packet) const {
- size_t start_size = packet->size();
- DCHECK_LT(start_size + 20, kMaxIpPacketSize) << "Not enough buffer space";
- if (start_size + 20 > kMaxIpPacketSize)
- return;
-
- packet->resize(start_size + 20);
-
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 20);
-
- big_endian_writer.WriteU8(0x80);
- big_endian_writer.WriteU8(kPacketTypeXr);
- big_endian_writer.WriteU16(4); // Length.
- big_endian_writer.WriteU32(ssrc_); // Add our own SSRC.
- big_endian_writer.WriteU8(4); // Add block type.
- big_endian_writer.WriteU8(0); // Add reserved.
- big_endian_writer.WriteU16(2); // Block length.
+void RtcpBuilder::AddRrtr(const RtcpReceiverReferenceTimeReport* rrtr) {
+ AddRtcpHeader(kPacketTypeXr, 0);
+ writer_.WriteU32(ssrc_); // Add our own SSRC.
+ writer_.WriteU8(4); // Add block type.
+ writer_.WriteU8(0); // Add reserved.
+ writer_.WriteU16(2); // Block length.
// Add the NTP timestamp of the receiver reference time report.
- big_endian_writer.WriteU32(rrtr->ntp_seconds);
- big_endian_writer.WriteU32(rrtr->ntp_fraction);
+ writer_.WriteU32(rrtr->ntp_seconds);
+ writer_.WriteU32(rrtr->ntp_fraction);
}
-void RtcpSender::BuildCast(const RtcpCastMessage* cast,
- base::TimeDelta target_delay,
- Packet* packet) const {
- size_t start_size = packet->size();
- DCHECK_LT(start_size + 20, kMaxIpPacketSize) << "Not enough buffer space";
- if (start_size + 20 > kMaxIpPacketSize)
- return;
-
- packet->resize(start_size + 20);
-
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 20);
- uint8 FMT = 15; // Application layer feedback.
- big_endian_writer.WriteU8(0x80 + FMT);
- big_endian_writer.WriteU8(kPacketTypePayloadSpecific);
- big_endian_writer.WriteU8(0);
- size_t cast_size_pos = start_size + 3; // Save length position.
- big_endian_writer.WriteU8(4);
- big_endian_writer.WriteU32(ssrc_); // Add our own SSRC.
- big_endian_writer.WriteU32(cast->media_ssrc); // Remote SSRC.
- big_endian_writer.WriteU32(kCast);
- big_endian_writer.WriteU8(static_cast<uint8>(cast->ack_frame_id));
- size_t cast_loss_field_pos = start_size + 17; // Save loss field position.
- big_endian_writer.WriteU8(0); // Overwritten with number_of_loss_fields.
+void RtcpBuilder::AddCast(const RtcpCastMessage* cast,
+ base::TimeDelta target_delay) {
+ // See RFC 4585 Section 6.4 for application-specific feedback messages.
+ AddRtcpHeader(kPacketTypePayloadSpecific, 15);
+ writer_.WriteU32(ssrc_); // Add our own SSRC.
+ writer_.WriteU32(cast->media_ssrc); // Remote SSRC.
+ writer_.WriteU32(kCast);
+ writer_.WriteU8(static_cast<uint8>(cast->ack_frame_id));
+ uint8* cast_loss_field_pos = reinterpret_cast<uint8*>(writer_.ptr());
+ writer_.WriteU8(0); // Overwritten with number_of_loss_fields.
DCHECK_LE(target_delay.InMilliseconds(),
std::numeric_limits<uint16_t>::max());
- big_endian_writer.WriteU16(target_delay.InMilliseconds());
+ writer_.WriteU16(target_delay.InMilliseconds());
size_t number_of_loss_fields = 0;
size_t max_number_of_loss_fields = std::min<size_t>(
- kRtcpMaxCastLossFields, (kMaxIpPacketSize - packet->size()) / 4);
+ kRtcpMaxCastLossFields, writer_.remaining() / 4);
MissingFramesAndPacketsMap::const_iterator frame_it =
cast->missing_frames_and_packets.begin();
@@ -312,28 +288,18 @@ void RtcpSender::BuildCast(const RtcpCastMessage* cast,
// Iterate through all frames with missing packets.
if (frame_it->second.empty()) {
// Special case: all packets in a frame are missing.
- start_size = packet->size();
- packet->resize(start_size + 4);
- base::BigEndianWriter big_endian_nack_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 4);
- big_endian_nack_writer.WriteU8(static_cast<uint8>(frame_it->first));
- big_endian_nack_writer.WriteU16(kRtcpCastAllPacketsLost);
- big_endian_nack_writer.WriteU8(0);
+ writer_.WriteU8(static_cast<uint8>(frame_it->first));
+ writer_.WriteU16(kRtcpCastAllPacketsLost);
+ writer_.WriteU8(0);
nack_string_builder.PushPacket(kRtcpCastAllPacketsLost);
++number_of_loss_fields;
} else {
PacketIdSet::const_iterator packet_it = frame_it->second.begin();
while (packet_it != frame_it->second.end()) {
uint16 packet_id = *packet_it;
-
- start_size = packet->size();
- packet->resize(start_size + 4);
- base::BigEndianWriter big_endian_nack_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 4);
-
// Write frame and packet id to buffer before calculating bitmask.
- big_endian_nack_writer.WriteU8(static_cast<uint8>(frame_it->first));
- big_endian_nack_writer.WriteU16(packet_id);
+ writer_.WriteU8(static_cast<uint8>(frame_it->first));
+ writer_.WriteU16(packet_id);
nack_string_builder.PushPacket(packet_id);
uint8 bitmask = 0;
@@ -348,7 +314,7 @@ void RtcpSender::BuildCast(const RtcpCastMessage* cast,
break;
}
}
- big_endian_nack_writer.WriteU8(bitmask);
+ writer_.WriteU8(bitmask);
++number_of_loss_fields;
}
}
@@ -358,34 +324,17 @@ void RtcpSender::BuildCast(const RtcpCastMessage* cast,
<< ", ACK: " << cast->ack_frame_id
<< ", NACK: " << nack_string_builder.GetString();
DCHECK_LE(number_of_loss_fields, kRtcpMaxCastLossFields);
- (*packet)[cast_size_pos] = static_cast<uint8>(4 + number_of_loss_fields);
- (*packet)[cast_loss_field_pos] = static_cast<uint8>(number_of_loss_fields);
+ *cast_loss_field_pos = static_cast<uint8>(number_of_loss_fields);
}
-void RtcpSender::BuildSR(const RtcpSenderInfo& sender_info,
- Packet* packet) const {
- // Sender report.
- size_t start_size = packet->size();
- if (start_size + 52 > kMaxIpPacketSize) {
- DLOG(FATAL) << "Not enough buffer space";
- return;
- }
-
- uint16 number_of_rows = 6;
- packet->resize(start_size + 28);
-
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 28);
- big_endian_writer.WriteU8(0x80);
- big_endian_writer.WriteU8(kPacketTypeSenderReport);
- big_endian_writer.WriteU16(number_of_rows);
- big_endian_writer.WriteU32(ssrc_);
- big_endian_writer.WriteU32(sender_info.ntp_seconds);
- big_endian_writer.WriteU32(sender_info.ntp_fraction);
- big_endian_writer.WriteU32(sender_info.rtp_timestamp);
- big_endian_writer.WriteU32(sender_info.send_packet_count);
- big_endian_writer.WriteU32(static_cast<uint32>(sender_info.send_octet_count));
- return;
+void RtcpBuilder::AddSR(const RtcpSenderInfo& sender_info) {
+ AddRtcpHeader(kPacketTypeSenderReport, 0);
+ writer_.WriteU32(ssrc_);
+ writer_.WriteU32(sender_info.ntp_seconds);
+ writer_.WriteU32(sender_info.ntp_fraction);
+ writer_.WriteU32(sender_info.rtp_timestamp);
+ writer_.WriteU32(sender_info.send_packet_count);
+ writer_.WriteU32(static_cast<uint32>(sender_info.send_octet_count));
}
/*
@@ -405,58 +354,31 @@ void RtcpSender::BuildSR(const RtcpSenderInfo& sender_info,
| delay since last RR (DLRR) |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
*/
-void RtcpSender::BuildDlrrRb(const RtcpDlrrReportBlock& dlrr,
- Packet* packet) const {
- size_t start_size = packet->size();
- if (start_size + 24 > kMaxIpPacketSize) {
- DLOG(FATAL) << "Not enough buffer space";
- return;
- }
-
- packet->resize(start_size + 24);
-
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[start_size])), 24);
- big_endian_writer.WriteU8(0x80);
- big_endian_writer.WriteU8(kPacketTypeXr);
- big_endian_writer.WriteU16(5); // Length.
- big_endian_writer.WriteU32(ssrc_); // Add our own SSRC.
- big_endian_writer.WriteU8(5); // Add block type.
- big_endian_writer.WriteU8(0); // Add reserved.
- big_endian_writer.WriteU16(3); // Block length.
- big_endian_writer.WriteU32(ssrc_); // Add the media (received RTP) SSRC.
- big_endian_writer.WriteU32(dlrr.last_rr);
- big_endian_writer.WriteU32(dlrr.delay_since_last_rr);
- return;
+void RtcpBuilder::AddDlrrRb(const RtcpDlrrReportBlock& dlrr) {
+ AddRtcpHeader(kPacketTypeXr, 0);
+ writer_.WriteU32(ssrc_); // Add our own SSRC.
+ writer_.WriteU8(5); // Add block type.
+ writer_.WriteU8(0); // Add reserved.
+ writer_.WriteU16(3); // Block length.
+ writer_.WriteU32(ssrc_); // Add the media (received RTP) SSRC.
+ writer_.WriteU32(dlrr.last_rr);
+ writer_.WriteU32(dlrr.delay_since_last_rr);
}
-void RtcpSender::BuildReceiverLog(
- const ReceiverRtcpEventSubscriber::RtcpEventMultiMap& rtcp_events,
- Packet* packet) {
- const size_t packet_start_size = packet->size();
- size_t number_of_frames = 0;
+void RtcpBuilder::AddReceiverLog(
+ const ReceiverRtcpEventSubscriber::RtcpEventMultiMap& rtcp_events) {
size_t total_number_of_messages_to_send = 0;
- size_t rtcp_log_size = 0;
RtcpReceiverLogMessage receiver_log_message;
- if (!BuildRtcpReceiverLogMessage(rtcp_events,
- packet_start_size,
- &receiver_log_message,
- &number_of_frames,
- &total_number_of_messages_to_send,
- &rtcp_log_size)) {
+ if (!GetRtcpReceiverLogMessage(rtcp_events,
+ &receiver_log_message,
+ &total_number_of_messages_to_send)) {
return;
}
- packet->resize(packet_start_size + rtcp_log_size);
- base::BigEndianWriter big_endian_writer(
- reinterpret_cast<char*>(&((*packet)[packet_start_size])), rtcp_log_size);
- big_endian_writer.WriteU8(0x80 + kReceiverLogSubtype);
- big_endian_writer.WriteU8(kPacketTypeApplicationDefined);
- big_endian_writer.WriteU16(static_cast<uint16>(
- 2 + 2 * number_of_frames + total_number_of_messages_to_send));
- big_endian_writer.WriteU32(ssrc_); // Add our own SSRC.
- big_endian_writer.WriteU32(kCast);
+ AddRtcpHeader(kPacketTypeApplicationDefined, kReceiverLogSubtype);
+ writer_.WriteU32(ssrc_); // Add our own SSRC.
+ writer_.WriteU32(kCast);
while (!receiver_log_message.empty() &&
total_number_of_messages_to_send > 0) {
@@ -464,7 +386,7 @@ void RtcpSender::BuildReceiverLog(
receiver_log_message.front());
// Add our frame header.
- big_endian_writer.WriteU32(frame_log_messages.rtp_timestamp_);
+ writer_.WriteU32(frame_log_messages.rtp_timestamp_);
size_t messages_in_frame = frame_log_messages.event_log_messages_.size();
if (messages_in_frame > total_number_of_messages_to_send) {
// We are running out of space.
@@ -474,15 +396,15 @@ void RtcpSender::BuildReceiverLog(
total_number_of_messages_to_send -= messages_in_frame;
// On the wire format is number of messages - 1.
- big_endian_writer.WriteU8(static_cast<uint8>(messages_in_frame - 1));
+ writer_.WriteU8(static_cast<uint8>(messages_in_frame - 1));
base::TimeTicks event_timestamp_base =
frame_log_messages.event_log_messages_.front().event_timestamp;
uint32 base_timestamp_ms =
(event_timestamp_base - base::TimeTicks()).InMilliseconds();
- big_endian_writer.WriteU8(static_cast<uint8>(base_timestamp_ms >> 16));
- big_endian_writer.WriteU8(static_cast<uint8>(base_timestamp_ms >> 8));
- big_endian_writer.WriteU8(static_cast<uint8>(base_timestamp_ms));
+ writer_.WriteU8(static_cast<uint8>(base_timestamp_ms >> 16));
+ writer_.WriteU8(static_cast<uint8>(base_timestamp_ms >> 8));
+ writer_.WriteU8(static_cast<uint8>(base_timestamp_ms));
while (!frame_log_messages.event_log_messages_.empty() &&
messages_in_frame > 0) {
@@ -496,13 +418,13 @@ void RtcpSender::BuildReceiverLog(
case FRAME_ACK_SENT:
case FRAME_PLAYOUT:
case FRAME_DECODED:
- big_endian_writer.WriteU16(
+ writer_.WriteU16(
static_cast<uint16>(event_message.delay_delta.InMilliseconds()));
- big_endian_writer.WriteU16(event_type_and_timestamp_delta);
+ writer_.WriteU16(event_type_and_timestamp_delta);
break;
case PACKET_RECEIVED:
- big_endian_writer.WriteU16(event_message.packet_id);
- big_endian_writer.WriteU16(event_type_and_timestamp_delta);
+ writer_.WriteU16(event_message.packet_id);
+ writer_.WriteU16(event_type_and_timestamp_delta);
break;
default:
NOTREACHED();
@@ -518,15 +440,13 @@ void RtcpSender::BuildReceiverLog(
DCHECK_EQ(total_number_of_messages_to_send, 0u);
}
-bool RtcpSender::BuildRtcpReceiverLogMessage(
+bool RtcpBuilder::GetRtcpReceiverLogMessage(
const ReceiverRtcpEventSubscriber::RtcpEventMultiMap& rtcp_events,
- size_t start_size,
RtcpReceiverLogMessage* receiver_log_message,
- size_t* number_of_frames,
- size_t* total_number_of_messages_to_send,
- size_t* rtcp_log_size) {
+ size_t* total_number_of_messages_to_send) {
+ size_t number_of_frames = 0;
size_t remaining_space =
- std::min(kMaxReceiverLogBytes, kMaxIpPacketSize - start_size);
+ std::min<size_t>(kMaxReceiverLogBytes, writer_.remaining());
if (remaining_space < kRtcpCastLogHeaderSize + kRtcpReceiverFrameLogSize +
kRtcpReceiverEventLogSize) {
return false;
@@ -548,7 +468,7 @@ bool RtcpSender::BuildRtcpReceiverLogMessage(
const RtpTimestamp rtp_timestamp = rit->first;
RtcpReceiverFrameLogMessage frame_log(rtp_timestamp);
remaining_space -= kRtcpReceiverFrameLogSize;
- ++*number_of_frames;
+ ++number_of_frames;
// Get all events of a single frame.
sorted_log_messages.clear();
@@ -598,20 +518,20 @@ bool RtcpSender::BuildRtcpReceiverLogMessage(
// unlikely there will be a match anyway.
if (rtcp_events_history_.size() > kFirstRedundancyOffset) {
// Add first redundancy messages, if enough space remaining
- AddReceiverLog(rtcp_events_history_[kFirstRedundancyOffset],
- receiver_log_message,
- &remaining_space,
- number_of_frames,
- total_number_of_messages_to_send);
+ AddReceiverLogEntries(rtcp_events_history_[kFirstRedundancyOffset],
+ receiver_log_message,
+ &remaining_space,
+ &number_of_frames,
+ total_number_of_messages_to_send);
}
if (rtcp_events_history_.size() > kSecondRedundancyOffset) {
// Add second redundancy messages, if enough space remaining
- AddReceiverLog(rtcp_events_history_[kSecondRedundancyOffset],
- receiver_log_message,
- &remaining_space,
- number_of_frames,
- total_number_of_messages_to_send);
+ AddReceiverLogEntries(rtcp_events_history_[kSecondRedundancyOffset],
+ receiver_log_message,
+ &remaining_space,
+ &number_of_frames,
+ total_number_of_messages_to_send);
}
if (rtcp_events_history_.size() > kReceiveLogMessageHistorySize) {
@@ -620,16 +540,9 @@ bool RtcpSender::BuildRtcpReceiverLogMessage(
DCHECK_LE(rtcp_events_history_.size(), kReceiveLogMessageHistorySize);
- *rtcp_log_size =
- kRtcpCastLogHeaderSize + *number_of_frames * kRtcpReceiverFrameLogSize +
- *total_number_of_messages_to_send * kRtcpReceiverEventLogSize;
- DCHECK_GE(kMaxIpPacketSize, start_size + *rtcp_log_size)
- << "Not enough buffer space.";
-
- VLOG(3) << "number of frames: " << *number_of_frames;
+ VLOG(3) << "number of frames: " << number_of_frames;
VLOG(3) << "total messages to send: " << *total_number_of_messages_to_send;
- VLOG(3) << "rtcp log size: " << *rtcp_log_size;
- return *number_of_frames > 0;
+ return number_of_frames > 0;
}
} // namespace cast
diff --git a/media/cast/net/rtcp/rtcp_sender.h b/media/cast/net/rtcp/rtcp_builder.h
index 06b11d4844..b530648e4f 100644
--- a/media/cast/net/rtcp/rtcp_sender.h
+++ b/media/cast/net/rtcp/rtcp_builder.h
@@ -2,13 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CAST_NET_RTCP_RTCP_SENDER_H_
-#define MEDIA_CAST_NET_RTCP_RTCP_SENDER_H_
+#ifndef MEDIA_CAST_NET_RTCP_RTCP_BUILDER_H_
+#define MEDIA_CAST_NET_RTCP_RTCP_BUILDER_H_
#include <deque>
#include <list>
#include <string>
+#include "base/big_endian.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_defines.h"
#include "media/cast/net/cast_transport_defines.h"
@@ -38,67 +39,49 @@ COMPILE_ASSERT(kSecondRedundancyOffset >
kReceiveLogMessageHistorySize,
redundancy_offset_out_of_range);
-class PacedPacketSender;
-// TODO(hclam): This should be renamed to RtcpPacketBuilder. The function
-// of this class is to only to build a RTCP packet but not to send it.
-class RtcpSender {
+class RtcpBuilder {
public:
- RtcpSender(PacedPacketSender* outgoing_transport,
- uint32 sending_ssrc);
- ~RtcpSender();
+ explicit RtcpBuilder(uint32 sending_ssrc);
+ ~RtcpBuilder();
- // TODO(hclam): This method should be to build a packet instead of
- // sending it.
- void SendRtcpFromRtpReceiver(
+ PacketRef BuildRtcpFromReceiver(
const RtcpReportBlock* report_block,
const RtcpReceiverReferenceTimeReport* rrtr,
const RtcpCastMessage* cast_message,
const ReceiverRtcpEventSubscriber::RtcpEventMultiMap* rtcp_events,
base::TimeDelta target_delay);
- // TODO(hclam): This method should be to build a packet instead of
- // sending it.
- void SendRtcpFromRtpSender(const RtcpSenderInfo& sender_info);
+ PacketRef BuildRtcpFromSender(const RtcpSenderInfo& sender_info);
private:
- void BuildRR(const RtcpReportBlock* report_block,
- Packet* packet) const;
-
- void AddReportBlocks(const RtcpReportBlock& report_block,
- Packet* packet) const;
-
- void BuildRrtr(const RtcpReceiverReferenceTimeReport* rrtr,
- Packet* packet) const;
-
- void BuildCast(const RtcpCastMessage* cast_message,
- base::TimeDelta target_delay,
- Packet* packet) const;
-
- void BuildSR(const RtcpSenderInfo& sender_info, Packet* packet) const;
-
- void BuildDlrrRb(const RtcpDlrrReportBlock& dlrr, Packet* packet) const;
-
- void BuildReceiverLog(
+ void AddRtcpHeader(RtcpPacketFields payload, int format_or_count);
+ void PatchLengthField();
+ void AddRR(const RtcpReportBlock* report_block);
+ void AddReportBlocks(const RtcpReportBlock& report_block);
+ void AddRrtr(const RtcpReceiverReferenceTimeReport* rrtr);
+ void AddCast(const RtcpCastMessage* cast_message,
+ base::TimeDelta target_delay);
+ void AddSR(const RtcpSenderInfo& sender_info);
+ void AddDlrrRb(const RtcpDlrrReportBlock& dlrr);
+ void AddReceiverLog(
+ const ReceiverRtcpEventSubscriber::RtcpEventMultiMap& rtcp_events);
+
+ bool GetRtcpReceiverLogMessage(
const ReceiverRtcpEventSubscriber::RtcpEventMultiMap& rtcp_events,
- Packet* packet);
-
- bool BuildRtcpReceiverLogMessage(
- const ReceiverRtcpEventSubscriber::RtcpEventMultiMap& rtcp_events,
- size_t start_size,
RtcpReceiverLogMessage* receiver_log_message,
- size_t* number_of_frames,
- size_t* total_number_of_messages_to_send,
- size_t* rtcp_log_size);
+ size_t* total_number_of_messages_to_send);
- const uint32 ssrc_;
-
- // Not owned by this class.
- PacedPacketSender* const transport_;
+ void Start();
+ PacketRef Finish();
+ base::BigEndianWriter writer_;
+ const uint32 ssrc_;
+ char* ptr_of_length_;
+ PacketRef packet_;
std::deque<RtcpReceiverLogMessage> rtcp_events_history_;
- DISALLOW_COPY_AND_ASSIGN(RtcpSender);
+ DISALLOW_COPY_AND_ASSIGN(RtcpBuilder);
};
} // namespace cast
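(Editorial sketch, not part of the patch: given only the constructor and the BuildRtcpFromReceiver signature declared above, a hypothetical caller would look roughly like this; the SSRC and delay values are made up.)

// Hypothetical usage of the new builder API -- values are illustrative.
RtcpBuilder builder(0x10203);                  // Sending SSRC.
RtcpReportBlock report_block;                  // Filled in by the caller.
PacketRef packet = builder.BuildRtcpFromReceiver(
    &report_block,
    NULL,                                      // No RRTR.
    NULL,                                      // No Cast feedback message.
    NULL,                                      // No receiver event log.
    base::TimeDelta::FromMilliseconds(100));   // Target playout delay.
// The caller, not the builder, then hands |packet| to the transport,
// e.g. PacedPacketSender::SendRtcpPacket(ssrc, packet).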
diff --git a/media/cast/net/rtcp/rtcp_sender_unittest.cc b/media/cast/net/rtcp/rtcp_builder_unittest.cc
index 6dbea4d94e..e68444aef3 100644
--- a/media/cast/net/rtcp/rtcp_sender_unittest.cc
+++ b/media/cast/net/rtcp/rtcp_builder_unittest.cc
@@ -9,10 +9,9 @@
#include "media/cast/net/cast_transport_defines.h"
#include "media/cast/net/pacing/paced_sender.h"
#include "media/cast/net/rtcp/receiver_rtcp_event_subscriber.h"
-#include "media/cast/net/rtcp/rtcp_sender.h"
+#include "media/cast/net/rtcp/rtcp_builder.h"
#include "media/cast/net/rtcp/rtcp_utility.h"
#include "media/cast/net/rtcp/test_rtcp_packet_builder.h"
-#include "media/cast/test/fake_single_thread_task_runner.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace media {
@@ -40,93 +39,50 @@ RtcpReportBlock GetReportBlock() {
} // namespace
-class TestRtcpTransport : public PacedPacketSender {
- public:
- TestRtcpTransport() : packet_count_(0) {}
-
- virtual bool SendRtcpPacket(uint32 ssrc,
- PacketRef packet) OVERRIDE {
- EXPECT_EQ(expected_packet_.size(), packet->data.size());
- if (expected_packet_.size() != packet->data.size())
- return false;
- EXPECT_EQ(0, memcmp(&expected_packet_[0],
- &packet->data[0],
- packet->data.size()));
- packet_count_++;
- return true;
- }
-
- virtual bool SendPackets(
- const SendPacketVector& packets) OVERRIDE {
- return false;
- }
- virtual bool ResendPackets(
- const SendPacketVector& packets,
- const DedupInfo& dedup_info) OVERRIDE {
- return false;
- }
-
- virtual void CancelSendingPacket(
- const PacketKey& packet_key) OVERRIDE {
- }
- void SetExpectedRtcpPacket(scoped_ptr<Packet> packet) {
- expected_packet_.swap(*packet);
+class RtcpBuilderTest : public ::testing::Test {
+ protected:
+ RtcpBuilderTest()
+ : rtcp_builder_(new RtcpBuilder(kSendingSsrc)) {}
+
+ void ExpectPacketEQ(scoped_ptr<Packet> golden_packet,
+ PacketRef packet) {
+ EXPECT_EQ(golden_packet->size(), packet->data.size());
+ if (golden_packet->size() == packet->data.size()) {
+ for (size_t x = 0; x < golden_packet->size(); x++) {
+ EXPECT_EQ((*golden_packet)[x], packet->data[x]);
+ if ((*golden_packet)[x] != packet->data[x])
+ break;
+ }
+ }
}
- int packet_count() const { return packet_count_; }
-
- private:
- Packet expected_packet_;
- int packet_count_;
+ scoped_ptr<RtcpBuilder> rtcp_builder_;
- DISALLOW_COPY_AND_ASSIGN(TestRtcpTransport);
-};
-
-class RtcpSenderTest : public ::testing::Test {
- protected:
- RtcpSenderTest()
- : testing_clock_(new base::SimpleTestTickClock()),
- task_runner_(new test::FakeSingleThreadTaskRunner(testing_clock_)),
- cast_environment_(new CastEnvironment(
- scoped_ptr<base::TickClock>(testing_clock_).Pass(),
- task_runner_,
- task_runner_,
- task_runner_)),
- rtcp_sender_(new RtcpSender(&test_transport_, kSendingSsrc)) {}
-
- base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment.
- TestRtcpTransport test_transport_;
- scoped_refptr<test::FakeSingleThreadTaskRunner> task_runner_;
- scoped_refptr<CastEnvironment> cast_environment_;
- scoped_ptr<RtcpSender> rtcp_sender_;
-
- DISALLOW_COPY_AND_ASSIGN(RtcpSenderTest);
+ DISALLOW_COPY_AND_ASSIGN(RtcpBuilderTest);
};
-TEST_F(RtcpSenderTest, RtcpReceiverReport) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReport) {
// Receiver report with report block.
TestRtcpPacketBuilder p2;
p2.AddRr(kSendingSsrc, 1);
p2.AddRb(kMediaSsrc);
- test_transport_.SetExpectedRtcpPacket(p2.GetPacket().Pass());
RtcpReportBlock report_block = GetReportBlock();
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block, NULL, NULL, NULL, kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(
+ p2.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block, NULL, NULL, NULL, kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtr) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithRrtr) {
// Receiver report with report block.
TestRtcpPacketBuilder p;
p.AddRr(kSendingSsrc, 1);
p.AddRb(kMediaSsrc);
p.AddXrHeader(kSendingSsrc);
p.AddXrRrtrBlock();
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
RtcpReportBlock report_block = GetReportBlock();
@@ -134,23 +90,21 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtr) {
rrtr.ntp_seconds = kNtpHigh;
rrtr.ntp_fraction = kNtpLow;
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- &rrtr,
- NULL,
- NULL,
- kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ &rrtr,
+ NULL,
+ NULL,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithCast) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithCast) {
// Receiver report with report block.
TestRtcpPacketBuilder p;
p.AddRr(kSendingSsrc, 1);
p.AddRb(kMediaSsrc);
p.AddCast(kSendingSsrc, kMediaSsrc, kDefaultDelay);
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
RtcpReportBlock report_block = GetReportBlock();
@@ -165,24 +119,22 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithCast) {
cast_message.missing_frames_and_packets[kFrameIdWithLostPackets] =
missing_packets;
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- NULL,
- &cast_message,
- NULL,
- kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ NULL,
+ &cast_message,
+ NULL,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtraAndCastMessage) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithRrtraAndCastMessage) {
TestRtcpPacketBuilder p;
p.AddRr(kSendingSsrc, 1);
p.AddRb(kMediaSsrc);
p.AddXrHeader(kSendingSsrc);
p.AddXrRrtrBlock();
p.AddCast(kSendingSsrc, kMediaSsrc, kDefaultDelay);
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
RtcpReportBlock report_block = GetReportBlock();
@@ -201,17 +153,16 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtraAndCastMessage) {
cast_message.missing_frames_and_packets[kFrameIdWithLostPackets] =
missing_packets;
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- &rrtr,
- &cast_message,
- NULL,
- kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ &rrtr,
+ &cast_message,
+ NULL,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
static const uint32 kTimeBaseMs = 12345678;
static const uint32 kTimeDelayMs = 10;
@@ -221,7 +172,6 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
p.AddXrHeader(kSendingSsrc);
p.AddXrRrtrBlock();
p.AddCast(kSendingSsrc, kMediaSsrc, kDefaultDelay);
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
RtcpReportBlock report_block = GetReportBlock();
@@ -243,12 +193,13 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
ReceiverRtcpEventSubscriber event_subscriber(500, VIDEO_EVENT);
ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- &rrtr,
- &cast_message,
- &rtcp_events,
- kDefaultDelay);
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ &rrtr,
+ &cast_message,
+ &rtcp_events,
+ kDefaultDelay));
base::SimpleTestTickClock testing_clock;
testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
@@ -258,8 +209,6 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
p.AddReceiverEventLog(0, FRAME_ACK_SENT, 0);
p.AddReceiverEventLog(kLostPacketId1, PACKET_RECEIVED, kTimeDelayMs);
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
-
FrameEvent frame_event;
frame_event.rtp_timestamp = kRtpTimestamp;
frame_event.type = FRAME_ACK_SENT;
@@ -278,17 +227,17 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
event_subscriber.GetRtcpEventsAndReset(&rtcp_events);
EXPECT_EQ(2u, rtcp_events.size());
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- &rrtr,
- &cast_message,
- &rtcp_events,
- kDefaultDelay);
-
- EXPECT_EQ(2, test_transport_.packet_count());
+ ExpectPacketEQ(
+ p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ &rrtr,
+ &cast_message,
+ &rtcp_events,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithOversizedFrameLog) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithOversizedFrameLog) {
static const uint32 kTimeBaseMs = 12345678;
static const uint32 kTimeDelayMs = 10;
@@ -320,7 +269,6 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithOversizedFrameLog) {
static_cast<uint16>(kTimeDelayMs * i));
}
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
ReceiverRtcpEventSubscriber event_subscriber(500, VIDEO_EVENT);
FrameEvent frame_event;
@@ -344,17 +292,16 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithOversizedFrameLog) {
ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
event_subscriber.GetRtcpEventsAndReset(&rtcp_events);
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- NULL,
- NULL,
- &rtcp_events,
- kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ NULL,
+ NULL,
+ &rtcp_events,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithTooManyLogFrames) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithTooManyLogFrames) {
static const uint32 kTimeBaseMs = 12345678;
static const uint32 kTimeDelayMs = 10;
@@ -382,7 +329,6 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithTooManyLogFrames) {
p.AddReceiverFrameLog(kRtpTimestamp + i, 1, kTimeBaseMs + i * kTimeDelayMs);
p.AddReceiverEventLog(0, FRAME_ACK_SENT, 0);
}
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
ReceiverRtcpEventSubscriber event_subscriber(500, VIDEO_EVENT);
@@ -399,17 +345,16 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithTooManyLogFrames) {
ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
event_subscriber.GetRtcpEventsAndReset(&rtcp_events);
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- NULL,
- NULL,
- &rtcp_events,
- kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ NULL,
+ NULL,
+ &rtcp_events,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportWithOldLogFrames) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportWithOldLogFrames) {
static const uint32 kTimeBaseMs = 12345678;
TestRtcpPacketBuilder p;
@@ -431,7 +376,6 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithOldLogFrames) {
for (int i = 0; i < 10; ++i) {
p.AddReceiverEventLog(0, FRAME_ACK_SENT, i * kTimeBetweenEventsMs);
}
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
ReceiverRtcpEventSubscriber event_subscriber(500, VIDEO_EVENT);
for (int i = 0; i < 11; ++i) {
@@ -448,17 +392,16 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportWithOldLogFrames) {
ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
event_subscriber.GetRtcpEventsAndReset(&rtcp_events);
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- NULL,
- NULL,
- &rtcp_events,
- kDefaultDelay);
-
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ NULL,
+ NULL,
+ &rtcp_events,
+ kDefaultDelay));
}
-TEST_F(RtcpSenderTest, RtcpReceiverReportRedundancy) {
+TEST_F(RtcpBuilderTest, RtcpReceiverReportRedundancy) {
uint32 time_base_ms = 12345678;
int kTimeBetweenEventsMs = 10;
@@ -493,8 +436,6 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportRedundancy) {
p.AddReceiverFrameLog(kRtpTimestamp, 1, time_base_ms);
p.AddReceiverEventLog(0, FRAME_ACK_SENT, 0);
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
-
FrameEvent frame_event;
frame_event.rtp_timestamp = kRtpTimestamp;
frame_event.type = FRAME_ACK_SENT;
@@ -505,22 +446,21 @@ TEST_F(RtcpSenderTest, RtcpReceiverReportRedundancy) {
ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
event_subscriber.GetRtcpEventsAndReset(&rtcp_events);
- rtcp_sender_->SendRtcpFromRtpReceiver(
- &report_block,
- NULL,
- NULL,
- &rtcp_events,
- kDefaultDelay);
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromReceiver(
+ &report_block,
+ NULL,
+ NULL,
+ &rtcp_events,
+ kDefaultDelay));
testing_clock.Advance(
base::TimeDelta::FromMilliseconds(kTimeBetweenEventsMs));
time_base_ms += kTimeBetweenEventsMs;
}
-
- EXPECT_EQ(static_cast<int>(packet_count), test_transport_.packet_count());
}
-TEST_F(RtcpSenderTest, RtcpSenderReport) {
+TEST_F(RtcpBuilderTest, RtcpSenderReport) {
RtcpSenderInfo sender_info;
sender_info.ntp_seconds = kNtpHigh;
sender_info.ntp_fraction = kNtpLow;
@@ -531,11 +471,9 @@ TEST_F(RtcpSenderTest, RtcpSenderReport) {
// Sender report.
TestRtcpPacketBuilder p;
p.AddSr(kSendingSsrc, 0);
- test_transport_.SetExpectedRtcpPacket(p.GetPacket().Pass());
-
- rtcp_sender_->SendRtcpFromRtpSender(sender_info);
- EXPECT_EQ(1, test_transport_.packet_count());
+ ExpectPacketEQ(p.GetPacket().Pass(),
+ rtcp_builder_->BuildRtcpFromSender(sender_info));
}
} // namespace cast
diff --git a/media/cast/net/rtcp/rtcp_defines.cc b/media/cast/net/rtcp/rtcp_defines.cc
index 3346f45c10..a296dc8e48 100644
--- a/media/cast/net/rtcp/rtcp_defines.cc
+++ b/media/cast/net/rtcp/rtcp_defines.cc
@@ -33,8 +33,5 @@ RtcpReceiverReferenceTimeReport::~RtcpReceiverReferenceTimeReport() {}
RtcpEvent::RtcpEvent() : type(UNKNOWN), packet_id(0u) {}
RtcpEvent::~RtcpEvent() {}
-RtcpRttReport::RtcpRttReport() {}
-RtcpRttReport::~RtcpRttReport() {}
-
} // namespace cast
} // namespace media
diff --git a/media/cast/net/rtcp/rtcp_defines.h b/media/cast/net/rtcp/rtcp_defines.h
index 92dced3008..3dd23ef6a4 100644
--- a/media/cast/net/rtcp/rtcp_defines.h
+++ b/media/cast/net/rtcp/rtcp_defines.h
@@ -104,21 +104,8 @@ struct RtcpEvent {
uint16 packet_id;
};
-struct RtcpRttReport {
- RtcpRttReport();
- ~RtcpRttReport();
-
- base::TimeDelta rtt;
- base::TimeDelta avg_rtt;
- base::TimeDelta min_rtt;
- base::TimeDelta max_rtt;
-};
-
typedef base::Callback<void(const RtcpCastMessage&)> RtcpCastMessageCallback;
-typedef base::Callback<void(base::TimeDelta,
- base::TimeDelta,
- base::TimeDelta,
- base::TimeDelta)> RtcpRttCallback;
+typedef base::Callback<void(base::TimeDelta)> RtcpRttCallback;
typedef
base::Callback<void(const RtcpReceiverLogMessage&)> RtcpLogMessageCallback;
diff --git a/media/cast/net/rtcp/rtcp_unittest.cc b/media/cast/net/rtcp/rtcp_unittest.cc
index baa0699bdd..0b0d32785b 100644
--- a/media/cast/net/rtcp/rtcp_unittest.cc
+++ b/media/cast/net/rtcp/rtcp_unittest.cc
@@ -7,11 +7,9 @@
#include "base/test/simple_test_tick_clock.h"
#include "media/cast/cast_defines.h"
#include "media/cast/net/cast_transport_config.h"
-#include "media/cast/net/cast_transport_sender_impl.h"
#include "media/cast/net/pacing/paced_sender.h"
#include "media/cast/net/rtcp/rtcp.h"
-#include "media/cast/net/rtcp/test_rtcp_packet_builder.h"
-#include "media/cast/test/fake_single_thread_task_runner.h"
+#include "media/cast/test/skewed_tick_clock.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace media {
@@ -21,79 +19,26 @@ using testing::_;
static const uint32 kSenderSsrc = 0x10203;
static const uint32 kReceiverSsrc = 0x40506;
-static const int64 kAddedDelay = 123;
-static const int64 kAddedShortDelay = 100;
+static const int kInitialReceiverClockOffsetSeconds = -5;
-class RtcpTestPacketSender : public PacketSender {
+class FakeRtcpTransport : public PacedPacketSender {
public:
- explicit RtcpTestPacketSender(base::SimpleTestTickClock* testing_clock)
- : drop_packets_(false),
- short_delay_(false),
- rtcp_receiver_(NULL),
- testing_clock_(testing_clock) {}
- virtual ~RtcpTestPacketSender() {}
- // Packet lists imply a RTP packet.
- void set_rtcp_receiver(Rtcp* rtcp) { rtcp_receiver_ = rtcp; }
-
- void set_short_delay() { short_delay_ = true; }
-
- void set_drop_packets(bool drop_packets) { drop_packets_ = drop_packets; }
-
- // A singular packet implies a RTCP packet.
- virtual bool SendPacket(PacketRef packet,
- const base::Closure& cb) OVERRIDE {
- if (short_delay_) {
- testing_clock_->Advance(
- base::TimeDelta::FromMilliseconds(kAddedShortDelay));
- } else {
- testing_clock_->Advance(base::TimeDelta::FromMilliseconds(kAddedDelay));
- }
- if (drop_packets_)
- return true;
-
- rtcp_receiver_->IncomingRtcpPacket(&packet->data[0], packet->data.size());
- return true;
- }
-
- private:
- bool drop_packets_;
- bool short_delay_;
- Rtcp* rtcp_receiver_;
- base::SimpleTestTickClock* testing_clock_;
+ explicit FakeRtcpTransport(base::SimpleTestTickClock* clock)
+ : clock_(clock),
+ packet_delay_(base::TimeDelta::FromMilliseconds(42)) {}
- DISALLOW_COPY_AND_ASSIGN(RtcpTestPacketSender);
-};
-
-class LocalRtcpTransport : public PacedPacketSender {
- public:
- explicit LocalRtcpTransport(base::SimpleTestTickClock* testing_clock)
- : drop_packets_(false),
- short_delay_(false),
- testing_clock_(testing_clock) {}
+ void set_rtcp_destination(Rtcp* rtcp) { rtcp_ = rtcp; }
- void set_rtcp_receiver(Rtcp* rtcp) { rtcp_ = rtcp; }
-
- void set_short_delay() { short_delay_ = true; }
-
- void set_drop_packets(bool drop_packets) { drop_packets_ = drop_packets; }
-
- virtual bool SendRtcpPacket(uint32 ssrc,
- PacketRef packet) OVERRIDE {
- if (short_delay_) {
- testing_clock_->Advance(
- base::TimeDelta::FromMilliseconds(kAddedShortDelay));
- } else {
- testing_clock_->Advance(base::TimeDelta::FromMilliseconds(kAddedDelay));
- }
- if (drop_packets_)
- return true;
+ base::TimeDelta packet_delay() const { return packet_delay_; }
+ void set_packet_delay(base::TimeDelta delay) { packet_delay_ = delay; }
+ virtual bool SendRtcpPacket(uint32 ssrc, PacketRef packet) OVERRIDE {
+ clock_->Advance(packet_delay_);
rtcp_->IncomingRtcpPacket(&packet->data[0], packet->data.size());
return true;
}
- virtual bool SendPackets(
- const SendPacketVector& packets) OVERRIDE {
+ virtual bool SendPackets(const SendPacketVector& packets) OVERRIDE {
return false;
}
@@ -102,23 +47,21 @@ class LocalRtcpTransport : public PacedPacketSender {
return false;
}
- virtual void CancelSendingPacket(
- const PacketKey& packet_key) OVERRIDE {
+ virtual void CancelSendingPacket(const PacketKey& packet_key) OVERRIDE {
}
private:
- bool drop_packets_;
- bool short_delay_;
+ base::SimpleTestTickClock* const clock_;
+ base::TimeDelta packet_delay_;
Rtcp* rtcp_;
- base::SimpleTestTickClock* testing_clock_;
- DISALLOW_COPY_AND_ASSIGN(LocalRtcpTransport);
+ DISALLOW_COPY_AND_ASSIGN(FakeRtcpTransport);
};
-class MockReceiverStats : public RtpReceiverStatistics {
+class FakeReceiverStats : public RtpReceiverStatistics {
public:
- MockReceiverStats() {}
- virtual ~MockReceiverStats() {}
+ FakeReceiverStats() {}
+ virtual ~FakeReceiverStats() {}
virtual void GetStatistics(uint8* fraction_lost,
uint32* cumulative_lost,
@@ -131,7 +74,7 @@ class MockReceiverStats : public RtpReceiverStatistics {
}
private:
- DISALLOW_COPY_AND_ASSIGN(MockReceiverStats);
+ DISALLOW_COPY_AND_ASSIGN(FakeReceiverStats);
};
class MockFrameSender {
@@ -141,11 +84,8 @@ class MockFrameSender {
MOCK_METHOD1(OnReceivedCastFeedback,
void(const RtcpCastMessage& cast_message));
- MOCK_METHOD4(OnReceivedRtt,
- void(base::TimeDelta rtt,
- base::TimeDelta avg_rtt,
- base::TimeDelta min_rtt,
- base::TimeDelta max_rtt));
+ MOCK_METHOD1(OnMeasuredRoundTripTime, void(base::TimeDelta rtt));
+
private:
DISALLOW_COPY_AND_ASSIGN(MockFrameSender);
};
@@ -153,250 +93,134 @@ class MockFrameSender {
class RtcpTest : public ::testing::Test {
protected:
RtcpTest()
- : testing_clock_(new base::SimpleTestTickClock()),
- task_runner_(new test::FakeSingleThreadTaskRunner(testing_clock_)),
- sender_to_receiver_(testing_clock_),
- receiver_to_sender_(testing_clock_) {
- testing_clock_->Advance(base::TimeTicks::Now() - base::TimeTicks());
+ : sender_clock_(new base::SimpleTestTickClock()),
+ receiver_clock_(new test::SkewedTickClock(sender_clock_.get())),
+ sender_to_receiver_(sender_clock_.get()),
+ receiver_to_sender_(sender_clock_.get()),
+ rtcp_for_sender_(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
+ base::Unretained(&mock_frame_sender_)),
+ base::Bind(&MockFrameSender::OnMeasuredRoundTripTime,
+ base::Unretained(&mock_frame_sender_)),
+ RtcpLogMessageCallback(),
+ sender_clock_.get(),
+ &sender_to_receiver_,
+ kSenderSsrc,
+ kReceiverSsrc),
+ rtcp_for_receiver_(RtcpCastMessageCallback(),
+ RtcpRttCallback(),
+ RtcpLogMessageCallback(),
+ receiver_clock_.get(),
+ &receiver_to_sender_,
+ kReceiverSsrc,
+ kSenderSsrc) {
+ sender_clock_->Advance(base::TimeTicks::Now() - base::TimeTicks());
+ receiver_clock_->SetSkew(
+ 1.0, // No skew.
+ base::TimeDelta::FromSeconds(kInitialReceiverClockOffsetSeconds));
+
+ sender_to_receiver_.set_rtcp_destination(&rtcp_for_receiver_);
+ receiver_to_sender_.set_rtcp_destination(&rtcp_for_sender_);
}
virtual ~RtcpTest() {}
- static void UpdateCastTransportStatus(CastTransportStatus status) {
- bool result = (status == TRANSPORT_AUDIO_INITIALIZED ||
- status == TRANSPORT_VIDEO_INITIALIZED);
- EXPECT_TRUE(result);
- }
-
- void RunTasks(int during_ms) {
- for (int i = 0; i < during_ms; ++i) {
- // Call process the timers every 1 ms.
- testing_clock_->Advance(base::TimeDelta::FromMilliseconds(1));
- task_runner_->RunTasks();
- }
- }
-
- base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment.
- scoped_refptr<test::FakeSingleThreadTaskRunner> task_runner_;
- LocalRtcpTransport sender_to_receiver_;
- LocalRtcpTransport receiver_to_sender_;
+ scoped_ptr<base::SimpleTestTickClock> sender_clock_;
+ scoped_ptr<test::SkewedTickClock> receiver_clock_;
+ FakeRtcpTransport sender_to_receiver_;
+ FakeRtcpTransport receiver_to_sender_;
MockFrameSender mock_frame_sender_;
- MockReceiverStats stats_;
+ Rtcp rtcp_for_sender_;
+ Rtcp rtcp_for_receiver_;
+ FakeReceiverStats stats_;
DISALLOW_COPY_AND_ASSIGN(RtcpTest);
};
-TEST_F(RtcpTest, BasicSenderReport) {
- Rtcp rtcp(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
- base::Unretained(&mock_frame_sender_)),
- base::Bind(&MockFrameSender::OnReceivedRtt,
- base::Unretained(&mock_frame_sender_)),
- RtcpLogMessageCallback(),
- testing_clock_,
- &sender_to_receiver_,
- kSenderSsrc,
- kReceiverSsrc);
- sender_to_receiver_.set_rtcp_receiver(&rtcp);
- rtcp.SendRtcpFromRtpSender(base::TimeTicks(), 0, 1, 1);
-}
-
-TEST_F(RtcpTest, BasicReceiverReport) {
- Rtcp rtcp(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
- base::Unretained(&mock_frame_sender_)),
- base::Bind(&MockFrameSender::OnReceivedRtt,
- base::Unretained(&mock_frame_sender_)),
- RtcpLogMessageCallback(),
- testing_clock_,
- &receiver_to_sender_,
- kSenderSsrc,
- kReceiverSsrc);
- receiver_to_sender_.set_rtcp_receiver(&rtcp);
- rtcp.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
-}
-
-TEST_F(RtcpTest, BasicCast) {
- EXPECT_CALL(mock_frame_sender_, OnReceivedCastFeedback(_)).Times(1);
-
- // Media sender.
- Rtcp rtcp(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
- base::Unretained(&mock_frame_sender_)),
- base::Bind(&MockFrameSender::OnReceivedRtt,
- base::Unretained(&mock_frame_sender_)),
- RtcpLogMessageCallback(),
- testing_clock_,
- &receiver_to_sender_,
- kSenderSsrc,
- kSenderSsrc);
- receiver_to_sender_.set_rtcp_receiver(&rtcp);
- RtcpCastMessage cast_message(kSenderSsrc);
- cast_message.ack_frame_id = kAckFrameId;
- PacketIdSet missing_packets;
- cast_message.missing_frames_and_packets[kLostFrameId] = missing_packets;
-
- missing_packets.insert(kLostPacketId1);
- missing_packets.insert(kLostPacketId2);
- missing_packets.insert(kLostPacketId3);
- cast_message.missing_frames_and_packets[kFrameIdWithLostPackets] =
- missing_packets;
- rtcp.SendRtcpFromRtpReceiver(&cast_message, base::TimeDelta(), NULL, NULL);
+TEST_F(RtcpTest, LipSyncGleanedFromSenderReport) {
+ // Initially, expect no lip-sync info receiver-side without having first
+ // received an RTCP packet.
+ base::TimeTicks reference_time;
+ uint32 rtp_timestamp;
+ ASSERT_FALSE(rtcp_for_receiver_.GetLatestLipSyncTimes(&rtp_timestamp,
+ &reference_time));
+
+ // Send a Sender Report to the receiver.
+ const base::TimeTicks reference_time_sent = sender_clock_->NowTicks();
+ const uint32 rtp_timestamp_sent = 0xbee5;
+ rtcp_for_sender_.SendRtcpFromRtpSender(
+ reference_time_sent, rtp_timestamp_sent, 1, 1);
+
+ // Now the receiver should have lip-sync info. Confirm that the lip-sync
+ // reference time is the same as that sent.
+ EXPECT_TRUE(rtcp_for_receiver_.GetLatestLipSyncTimes(&rtp_timestamp,
+ &reference_time));
+ const base::TimeTicks rolled_back_time =
+ (reference_time -
+ // Roll-back relative clock offset:
+ base::TimeDelta::FromSeconds(kInitialReceiverClockOffsetSeconds) -
+ // Roll-back packet transmission time (because RTT is not yet known):
+ sender_to_receiver_.packet_delay());
+ EXPECT_NEAR(0, (reference_time_sent - rolled_back_time).InMicroseconds(), 5);
+ EXPECT_EQ(rtp_timestamp_sent, rtp_timestamp);
}
-TEST_F(RtcpTest, RttReducedSizeRtcp) {
- // Media receiver.
- Rtcp rtcp_receiver(RtcpCastMessageCallback(),
- RtcpRttCallback(),
- RtcpLogMessageCallback(),
- testing_clock_,
- &receiver_to_sender_,
- kReceiverSsrc,
- kSenderSsrc);
-
- // Media sender.
- Rtcp rtcp_sender(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
- base::Unretained(&mock_frame_sender_)),
- base::Bind(&MockFrameSender::OnReceivedRtt,
- base::Unretained(&mock_frame_sender_)),
- RtcpLogMessageCallback(),
- testing_clock_,
- &sender_to_receiver_,
- kSenderSsrc,
- kReceiverSsrc);
-
- sender_to_receiver_.set_rtcp_receiver(&rtcp_receiver);
- receiver_to_sender_.set_rtcp_receiver(&rtcp_sender);
-
- base::TimeDelta rtt;
- base::TimeDelta avg_rtt;
- base::TimeDelta min_rtt;
- base::TimeDelta max_rtt;
- EXPECT_FALSE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
-
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 1, 1, 1);
- RunTasks(33);
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
- EXPECT_TRUE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 2, 1, 1);
- RunTasks(33);
-}
-
-TEST_F(RtcpTest, Rtt) {
- // Media receiver.
- Rtcp rtcp_receiver(RtcpCastMessageCallback(),
- RtcpRttCallback(),
- RtcpLogMessageCallback(),
- testing_clock_,
- &receiver_to_sender_,
- kReceiverSsrc,
- kSenderSsrc);
-
- // Media sender.
- Rtcp rtcp_sender(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
- base::Unretained(&mock_frame_sender_)),
- base::Bind(&MockFrameSender::OnReceivedRtt,
- base::Unretained(&mock_frame_sender_)),
- RtcpLogMessageCallback(),
- testing_clock_,
- &sender_to_receiver_,
- kSenderSsrc,
- kReceiverSsrc);
-
- receiver_to_sender_.set_rtcp_receiver(&rtcp_sender);
- sender_to_receiver_.set_rtcp_receiver(&rtcp_receiver);
-
- base::TimeDelta rtt;
- base::TimeDelta avg_rtt;
- base::TimeDelta min_rtt;
- base::TimeDelta max_rtt;
- EXPECT_FALSE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
-
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 1, 1, 1);
- RunTasks(33);
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
-
- EXPECT_TRUE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
- RunTasks(33);
-
- RunTasks(33);
-
- EXPECT_NEAR(2 * kAddedDelay, rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedDelay, avg_rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedDelay, min_rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedDelay, max_rtt.InMilliseconds(), 2);
-
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 2, 1, 1);
- RunTasks(33);
-
- receiver_to_sender_.set_short_delay();
- sender_to_receiver_.set_short_delay();
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
- EXPECT_TRUE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
- EXPECT_NEAR(kAddedDelay + kAddedShortDelay, rtt.InMilliseconds(), 2);
- EXPECT_NEAR(
- (kAddedShortDelay + 3 * kAddedDelay) / 2, avg_rtt.InMilliseconds(), 2);
- EXPECT_NEAR(kAddedDelay + kAddedShortDelay, min_rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedDelay, max_rtt.InMilliseconds(), 2);
-
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 3, 1, 1);
- RunTasks(33);
-
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
- EXPECT_TRUE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
- EXPECT_NEAR(2 * kAddedShortDelay, rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedShortDelay, min_rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedDelay, max_rtt.InMilliseconds(), 2);
-
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
- EXPECT_TRUE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
- EXPECT_NEAR(2 * kAddedShortDelay, rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedShortDelay, min_rtt.InMilliseconds(), 2);
- EXPECT_NEAR(2 * kAddedDelay, max_rtt.InMilliseconds(), 2);
-}
-
-TEST_F(RtcpTest, RttWithPacketLoss) {
- // Media receiver.
- Rtcp rtcp_receiver(RtcpCastMessageCallback(),
- RtcpRttCallback(),
- RtcpLogMessageCallback(),
- testing_clock_,
- &receiver_to_sender_,
- kReceiverSsrc,
- kSenderSsrc);
-
- // Media sender.
- Rtcp rtcp_sender(base::Bind(&MockFrameSender::OnReceivedCastFeedback,
- base::Unretained(&mock_frame_sender_)),
- base::Bind(&MockFrameSender::OnReceivedRtt,
- base::Unretained(&mock_frame_sender_)),
- RtcpLogMessageCallback(),
- testing_clock_,
- &sender_to_receiver_,
- kSenderSsrc,
- kReceiverSsrc);
-
- receiver_to_sender_.set_rtcp_receiver(&rtcp_sender);
- sender_to_receiver_.set_rtcp_receiver(&rtcp_receiver);
-
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 0, 1, 1);
- RunTasks(33);
-
- base::TimeDelta rtt;
- base::TimeDelta avg_rtt;
- base::TimeDelta min_rtt;
- base::TimeDelta max_rtt;
- EXPECT_FALSE(rtcp_sender.Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt));
-
- receiver_to_sender_.set_short_delay();
- sender_to_receiver_.set_short_delay();
- receiver_to_sender_.set_drop_packets(true);
-
- rtcp_receiver.SendRtcpFromRtpReceiver(NULL, base::TimeDelta(), NULL, &stats_);
- rtcp_sender.SendRtcpFromRtpSender(testing_clock_->NowTicks(), 1, 1, 1);
- RunTasks(33);
-
+// TODO(miu): There were a few tests here that didn't actually test anything
+// except that the code wouldn't crash and a callback method was invoked. We
+// need to fill in more testing of RTCP now that much of the refactoring work
+// has been completed.
+
+TEST_F(RtcpTest, RoundTripTimesDeterminedFromReportPingPong) {
+ const int iterations = 12;
+ EXPECT_CALL(mock_frame_sender_, OnMeasuredRoundTripTime(_))
+ .Times(iterations);
+
+ // Initially, neither side knows the round trip time.
+ ASSERT_EQ(base::TimeDelta(), rtcp_for_sender_.current_round_trip_time());
+ ASSERT_EQ(base::TimeDelta(), rtcp_for_receiver_.current_round_trip_time());
+
+ // Do a number of ping-pongs, checking how the round trip times are measured
+ // by the sender and receiver.
+ base::TimeDelta expected_rtt_according_to_sender;
+ base::TimeDelta expected_rtt_according_to_receiver;
+ for (int i = 0; i < iterations; ++i) {
+ const base::TimeDelta one_way_trip_time =
+ base::TimeDelta::FromMilliseconds(1 << i);
+ sender_to_receiver_.set_packet_delay(one_way_trip_time);
+ receiver_to_sender_.set_packet_delay(one_way_trip_time);
+
+ // Sender --> Receiver
+ base::TimeTicks reference_time_sent = sender_clock_->NowTicks();
+ uint32 rtp_timestamp_sent = 0xbee5 + i;
+ rtcp_for_sender_.SendRtcpFromRtpSender(
+ reference_time_sent, rtp_timestamp_sent, 1, 1);
+ EXPECT_EQ(expected_rtt_according_to_sender,
+ rtcp_for_sender_.current_round_trip_time());
+#ifdef SENDER_PROVIDES_REPORT_BLOCK
+ EXPECT_EQ(expected_rtt_according_to_receiver,
+ rtcp_for_receiver_.current_round_trip_time());
+#endif
+
+ // Receiver --> Sender
+ rtcp_for_receiver_.SendRtcpFromRtpReceiver(
+ NULL, base::TimeDelta(), NULL, &stats_);
+ expected_rtt_according_to_sender = one_way_trip_time * 2;
+ EXPECT_EQ(expected_rtt_according_to_sender,
+ rtcp_for_sender_.current_round_trip_time());
+#ifdef SENDER_PROVIDES_REPORT_BLOCK
+ EXPECT_EQ(expected_rtt_according_to_receiver,
+ rtcp_for_receiver_.current_round_trip_time());
+#endif
+
+ // In the next iteration of this loop, after the receiver gets the sender
+ // report, it will be measuring a round trip time consisting of two
+ // different one-way trip times.
+ expected_rtt_according_to_receiver =
+ (one_way_trip_time + one_way_trip_time * 2) / 2;
+ }
}
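(For orientation only: the ping-pong above converges on the usual RTCP round-trip computation from RFC 3550, where the echoed send time and the peer's holding time are subtracted from the arrival time. A generic sketch, not tied to this class's internals:)

#include <stdint.h>

// Generic RTCP-style round-trip time (RFC 3550, section 6.4.1):
// rtt = arrival_time - echoed_send_time - peer_processing_delay.
inline int64_t RoundTripMs(int64_t arrival_ms,
                           int64_t echoed_send_ms,
                           int64_t peer_delay_ms) {
  return arrival_ms - echoed_send_ms - peer_delay_ms;
}

// Example: report sent at t=100, held by the peer for 0 ms, reply arrives at
// t=184 over symmetric 42 ms legs -> RTT = 184 - 100 - 0 = 84 ms.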
-TEST_F(RtcpTest, NtpAndTime) {
+// TODO(miu): Find a better home for this test.
+TEST(MisplacedCastTest, NtpAndTime) {
const int64 kSecondsbetweenYear1900and2010 = INT64_C(40176 * 24 * 60 * 60);
const int64 kSecondsbetweenYear1900and2030 = INT64_C(47481 * 24 * 60 * 60);
diff --git a/media/cast/net/rtcp/rtcp_utility.cc b/media/cast/net/rtcp/rtcp_utility.cc
index a1a6a48c1d..3cfb8ea704 100644
--- a/media/cast/net/rtcp/rtcp_utility.cc
+++ b/media/cast/net/rtcp/rtcp_utility.cc
@@ -215,7 +215,7 @@ bool RtcpParser::ParseCastReceiverLogFrameItem(
event_log.packet_id = delay_delta_or_packet_id;
} else {
event_log.delay_delta = base::TimeDelta::FromMilliseconds(
- delay_delta_or_packet_id);
+ static_cast<int16>(delay_delta_or_packet_id));
}
frame_log.event_log_messages_.push_back(event_log);
}
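(Why the cast above matters, in isolation: the 16-bit field carries a signed delta, so reinterpreting it as int16 before widening restores negative values. A tiny self-contained check with arbitrary numbers:)

#include <stdint.h>
#include <cassert>

int main() {
  const uint16_t wire_value = 0xFF85;  // Two's-complement encoding of -123.
  const int32_t without_cast = wire_value;                     // 65413
  const int32_t with_cast = static_cast<int16_t>(wire_value);  // -123
  assert(without_cast == 65413);
  assert(with_cast == -123);
  return 0;
}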
diff --git a/media/cast/net/rtcp/rtcp_utility_unittest.cc b/media/cast/net/rtcp/rtcp_utility_unittest.cc
index ed2ab8d663..ec3875553b 100644
--- a/media/cast/net/rtcp/rtcp_utility_unittest.cc
+++ b/media/cast/net/rtcp/rtcp_utility_unittest.cc
@@ -367,7 +367,7 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationBase) {
TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationMulti) {
static const uint32 kTimeBaseMs = 12345678;
static const uint32 kTimeDelayMs = 10;
- static const uint32 kDelayDeltaMs = 123;
+ static const int kDelayDeltaMs = 123; // To be varied for every frame.
base::SimpleTestTickClock testing_clock;
testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
@@ -378,7 +378,8 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationMulti) {
RtcpReceiverEventLogMessage event_log;
event_log.type = FRAME_ACK_SENT;
event_log.event_timestamp = testing_clock.NowTicks();
- event_log.delay_delta = base::TimeDelta::FromMilliseconds(kDelayDeltaMs);
+ event_log.delay_delta =
+ base::TimeDelta::FromMilliseconds((j - 50) * kDelayDeltaMs);
frame_log.event_log_messages_.push_back(event_log);
receiver_log.push_back(frame_log);
testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
@@ -390,7 +391,8 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationMulti) {
p.AddReceiverLog(kSenderSsrc);
for (int i = 0; i < 100; ++i) {
p.AddReceiverFrameLog(kRtpTimestamp, 1, kTimeBaseMs + i * kTimeDelayMs);
- p.AddReceiverEventLog(kDelayDeltaMs, FRAME_ACK_SENT, 0);
+ const int delay = (i - 50) * kDelayDeltaMs;
+ p.AddReceiverEventLog(static_cast<uint16>(delay), FRAME_ACK_SENT, 0);
}
RtcpParser parser(kSourceSsrc, kSenderSsrc);
diff --git a/media/cast/net/rtp/cast_message_builder.cc b/media/cast/net/rtp/cast_message_builder.cc
index 42d6b98e0b..3d1d7a7748 100644
--- a/media/cast/net/rtp/cast_message_builder.cc
+++ b/media/cast/net/rtp/cast_message_builder.cc
@@ -5,6 +5,7 @@
#include "media/cast/net/rtp/cast_message_builder.h"
#include "media/cast/cast_defines.h"
+#include "media/cast/net/rtp/framer.h"
namespace media {
namespace cast {
@@ -12,13 +13,13 @@ namespace cast {
CastMessageBuilder::CastMessageBuilder(
base::TickClock* clock,
RtpPayloadFeedback* incoming_payload_feedback,
- FrameIdMap* frame_id_map,
+ const Framer* framer,
uint32 media_ssrc,
bool decoder_faster_than_max_frame_rate,
int max_unacked_frames)
: clock_(clock),
cast_feedback_(incoming_payload_feedback),
- frame_id_map_(frame_id_map),
+ framer_(framer),
media_ssrc_(media_ssrc),
decoder_faster_than_max_frame_rate_(decoder_faster_than_max_frame_rate),
max_unacked_frames_(max_unacked_frames),
@@ -51,7 +52,7 @@ void CastMessageBuilder::CompleteFrameReceived(uint32 frame_id) {
bool CastMessageBuilder::UpdateAckMessage(uint32 frame_id) {
if (!decoder_faster_than_max_frame_rate_) {
- int complete_frame_count = frame_id_map_->NumberOfCompleteFrames();
+ int complete_frame_count = framer_->NumberOfCompleteFrames();
if (complete_frame_count > max_unacked_frames_) {
// We have too many frames pending in our framer; slow down ACK.
if (!slowing_down_ack_) {
@@ -95,7 +96,7 @@ bool CastMessageBuilder::UpdateAckMessage(uint32 frame_id) {
bool CastMessageBuilder::TimeToSendNextCastMessage(
base::TimeTicks* time_to_send) {
// We haven't received any packets.
- if (last_update_time_.is_null() && frame_id_map_->Empty())
+ if (last_update_time_.is_null() && framer_->Empty())
return false;
*time_to_send = last_update_time_ + base::TimeDelta::FromMilliseconds(
@@ -120,7 +121,7 @@ void CastMessageBuilder::Reset() {
bool CastMessageBuilder::UpdateCastMessageInternal(RtcpCastMessage* message) {
if (last_update_time_.is_null()) {
- if (!frame_id_map_->Empty()) {
+ if (!framer_->Empty()) {
// We have received packets.
last_update_time_ = clock_->NowTicks();
}
@@ -148,10 +149,10 @@ void CastMessageBuilder::BuildPacketList() {
cast_msg_.missing_frames_and_packets.clear();
// Are we missing packets?
- if (frame_id_map_->Empty())
+ if (framer_->Empty())
return;
- uint32 newest_frame_id = frame_id_map_->NewestFrameId();
+ uint32 newest_frame_id = framer_->NewestFrameId();
uint32 next_expected_frame_id = cast_msg_.ack_frame_id + 1;
// Iterate over all frames.
@@ -169,9 +170,9 @@ void CastMessageBuilder::BuildPacketList() {
}
PacketIdSet missing;
- if (frame_id_map_->FrameExists(next_expected_frame_id)) {
+ if (framer_->FrameExists(next_expected_frame_id)) {
bool last_frame = (newest_frame_id == next_expected_frame_id);
- frame_id_map_->GetMissingPackets(
+ framer_->GetMissingPackets(
next_expected_frame_id, last_frame, &missing);
if (!missing.empty()) {
time_last_nacked_map_[next_expected_frame_id] = now;
diff --git a/media/cast/net/rtp/cast_message_builder.h b/media/cast/net/rtp/cast_message_builder.h
index 0136fed4a0..58d8e9a2bc 100644
--- a/media/cast/net/rtp/cast_message_builder.h
+++ b/media/cast/net/rtp/cast_message_builder.h
@@ -11,12 +11,12 @@
#include <map>
#include "media/cast/net/rtcp/rtcp.h"
-#include "media/cast/net/rtp/frame_id_map.h"
#include "media/cast/net/rtp/rtp_receiver_defines.h"
namespace media {
namespace cast {
+class Framer;
class RtpPayloadFeedback;
typedef std::map<uint32, base::TimeTicks> TimeLastNackMap;
@@ -25,7 +25,7 @@ class CastMessageBuilder {
public:
CastMessageBuilder(base::TickClock* clock,
RtpPayloadFeedback* incoming_payload_feedback,
- FrameIdMap* frame_id_map,
+ const Framer* framer,
uint32 media_ssrc,
bool decoder_faster_than_max_frame_rate,
int max_unacked_frames);
@@ -44,8 +44,8 @@ class CastMessageBuilder {
base::TickClock* const clock_; // Not owned by this class.
RtpPayloadFeedback* const cast_feedback_;
- // CastMessageBuilder has only const access to the frame id mapper.
- const FrameIdMap* const frame_id_map_;
+ // CastMessageBuilder has only const access to the framer.
+ const Framer* const framer_;
const uint32 media_ssrc_;
const bool decoder_faster_than_max_frame_rate_;
const int max_unacked_frames_;
diff --git a/media/cast/net/rtp/cast_message_builder_unittest.cc b/media/cast/net/rtp/cast_message_builder_unittest.cc
index e8a9aefa4a..02cfe1095f 100644
--- a/media/cast/net/rtp/cast_message_builder_unittest.cc
+++ b/media/cast/net/rtp/cast_message_builder_unittest.cc
@@ -8,6 +8,7 @@
#include "base/test/simple_test_tick_clock.h"
#include "media/cast/net/rtcp/rtcp.h"
#include "media/cast/net/rtp/cast_message_builder.h"
+#include "media/cast/net/rtp/framer.h"
#include "media/cast/net/rtp/rtp_receiver_defines.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -82,9 +83,14 @@ class NackFeedbackVerification : public RtpPayloadFeedback {
class CastMessageBuilderTest : public ::testing::Test {
protected:
CastMessageBuilderTest()
- : cast_msg_builder_(new CastMessageBuilder(&testing_clock_,
+ : framer_(&testing_clock_,
+ &feedback_,
+ kSsrc,
+ true,
+ 10),
+ cast_msg_builder_(new CastMessageBuilder(&testing_clock_,
&feedback_,
- &frame_id_map_,
+ &framer_,
kSsrc,
true,
0)) {
@@ -110,8 +116,9 @@ class CastMessageBuilderTest : public ::testing::Test {
void SetKeyFrame(bool is_key) { rtp_header_.is_key_frame = is_key; }
void InsertPacket() {
- PacketType packet_type = frame_id_map_.InsertPacket(rtp_header_);
- if (packet_type == kNewPacketCompletingFrame) {
+ bool duplicate;
+ uint8 payload = 0;
+ if (framer_.InsertPacket(&payload, 1, rtp_header_, &duplicate)) {
cast_msg_builder_->CompleteFrameReceived(rtp_header_.frame_id);
}
cast_msg_builder_->UpdateCastMessage();
@@ -120,16 +127,16 @@ class CastMessageBuilderTest : public ::testing::Test {
void SetDecoderSlowerThanMaxFrameRate(int max_unacked_frames) {
cast_msg_builder_.reset(new CastMessageBuilder(&testing_clock_,
&feedback_,
- &frame_id_map_,
+ &framer_,
kSsrc,
false,
max_unacked_frames));
}
NackFeedbackVerification feedback_;
+ Framer framer_;
scoped_ptr<CastMessageBuilder> cast_msg_builder_;
RtpCastHeader rtp_header_;
- FrameIdMap frame_id_map_;
base::SimpleTestTickClock testing_clock_;
DISALLOW_COPY_AND_ASSIGN(CastMessageBuilderTest);
@@ -196,7 +203,7 @@ TEST_F(CastMessageBuilderTest, RemoveOldFrames) {
InsertPacket();
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kLongTimeIncrementMs));
- frame_id_map_.RemoveOldFrames(5); // Simulate 5 being pulled for rendering.
+ framer_.RemoveOldFrames(5); // Simulate 5 being pulled for rendering.
cast_msg_builder_->UpdateCastMessage();
EXPECT_TRUE(feedback_.triggered());
EXPECT_EQ(5u, feedback_.last_frame_acked());
@@ -283,7 +290,7 @@ TEST_F(CastMessageBuilderTest, Reset) {
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kLongTimeIncrementMs));
cast_msg_builder_->Reset();
- frame_id_map_.Clear();
+ framer_.Reset();
// Should reset nack list state and request a key frame.
cast_msg_builder_->UpdateCastMessage();
EXPECT_TRUE(feedback_.triggered());
@@ -325,7 +332,7 @@ TEST_F(CastMessageBuilderTest, BasicRps) {
EXPECT_EQ(3u, feedback_.last_frame_acked());
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kLongTimeIncrementMs));
- frame_id_map_.RemoveOldFrames(3); // Simulate 3 being pulled for rendering.
+ framer_.RemoveOldFrames(3); // Simulate 3 being pulled for rendering.
cast_msg_builder_->UpdateCastMessage();
EXPECT_TRUE(feedback_.triggered());
EXPECT_EQ(3u, feedback_.last_frame_acked());
@@ -357,7 +364,7 @@ TEST_F(CastMessageBuilderTest, InOrderRps) {
InsertPacket();
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kShortTimeIncrementMs));
- frame_id_map_.RemoveOldFrames(3); // Simulate 3 being pulled for rendering.
+ framer_.RemoveOldFrames(3); // Simulate 3 being pulled for rendering.
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kShortTimeIncrementMs));
cast_msg_builder_->UpdateCastMessage();
@@ -414,7 +421,7 @@ TEST_F(CastMessageBuilderTest, SlowDownAck) {
EXPECT_EQ(expected_frame_id, feedback_.last_frame_acked());
// Simulate frame_id being pulled for rendering.
- frame_id_map_.RemoveOldFrames(frame_id);
+ framer_.RemoveOldFrames(frame_id);
// We should now leave the slowdown ACK state.
++frame_id;
SetFrameIds(frame_id, frame_id - 1);
diff --git a/media/cast/net/rtp/frame_buffer.cc b/media/cast/net/rtp/frame_buffer.cc
index 4a911635f9..319aad2502 100644
--- a/media/cast/net/rtp/frame_buffer.cc
+++ b/media/cast/net/rtp/frame_buffer.cc
@@ -13,6 +13,7 @@ FrameBuffer::FrameBuffer()
: frame_id_(0),
max_packet_id_(0),
num_packets_received_(0),
+ max_seen_packet_id_(0),
new_playout_delay_ms_(0),
is_key_frame_(false),
total_data_size_(0),
@@ -21,7 +22,7 @@ FrameBuffer::FrameBuffer()
FrameBuffer::~FrameBuffer() {}
-void FrameBuffer::InsertPacket(const uint8* payload_data,
+bool FrameBuffer::InsertPacket(const uint8* payload_data,
size_t payload_size,
const RtpCastHeader& rtp_header) {
// Is this the first packet in the frame?
@@ -37,11 +38,11 @@ void FrameBuffer::InsertPacket(const uint8* payload_data,
}
// Is this the correct frame?
if (rtp_header.frame_id != frame_id_)
- return;
+ return false;
// Insert every packet only once.
if (packets_.find(rtp_header.packet_id) != packets_.end()) {
- return;
+ return false;
}
std::vector<uint8> data;
@@ -54,7 +55,9 @@ void FrameBuffer::InsertPacket(const uint8* payload_data,
payload_data, payload_data + payload_size, retval.first->second.begin());
++num_packets_received_;
+ max_seen_packet_id_ = std::max(max_seen_packet_id_, rtp_header.packet_id);
total_data_size_ += payload_size;
+ return true;
}
bool FrameBuffer::Complete() const {
@@ -86,5 +89,28 @@ bool FrameBuffer::AssembleEncodedFrame(EncodedFrame* frame) const {
return true;
}
+void FrameBuffer::GetMissingPackets(bool newest_frame,
+ PacketIdSet* missing_packets) const {
+ // Missing packets are capped by max_seen_packet_id_, but only when this is
+ // the newest frame.
+ int maximum = newest_frame ? max_seen_packet_id_ : max_packet_id_;
+ int packet = 0;
+ for (PacketMap::const_iterator i = packets_.begin();
+ i != packets_.end() && packet <= maximum;
+ ++i) {
+ int end = std::min<int>(i->first, maximum + 1);
+ while (packet < end) {
+ missing_packets->insert(packet);
+ packet++;
+ }
+ packet++;
+ }
+ while (packet <= maximum) {
+ missing_packets->insert(packet);
+ packet++;
+ }
+}
+
+
} // namespace cast
} // namespace media
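(Editorial restatement, not part of the patch: the gap-finding in FrameBuffer::GetMissingPackets above can be read in isolation as a walk over the sorted packet ids, collecting every id up to the cap that is not present. A standalone sketch with plain STL types:)

#include <map>
#include <set>
#include <vector>

typedef std::map<int, std::vector<unsigned char> > PacketMap;

// Collect every packet id in [0, maximum] that is absent from |packets|.
std::set<int> MissingPacketIds(const PacketMap& packets, int maximum) {
  std::set<int> missing;
  int packet = 0;
  for (PacketMap::const_iterator i = packets.begin();
       i != packets.end() && packet <= maximum; ++i) {
    while (packet < i->first && packet <= maximum)
      missing.insert(packet++);
    packet = i->first + 1;  // Skip the id we do have.
  }
  while (packet <= maximum)
    missing.insert(packet++);
  return missing;
}

// E.g. ids {0, 2, 3} present with maximum 5 -> missing {1, 4, 5}.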
diff --git a/media/cast/net/rtp/frame_buffer.h b/media/cast/net/rtp/frame_buffer.h
index 8ccc2092c0..1089433eae 100644
--- a/media/cast/net/rtp/frame_buffer.h
+++ b/media/cast/net/rtp/frame_buffer.h
@@ -20,11 +20,13 @@ class FrameBuffer {
public:
FrameBuffer();
~FrameBuffer();
- void InsertPacket(const uint8* payload_data,
+ bool InsertPacket(const uint8* payload_data,
size_t payload_size,
const RtpCastHeader& rtp_header);
bool Complete() const;
+ void GetMissingPackets(bool newest_frame, PacketIdSet* missing_packets) const;
+
// If a frame is complete, sets the frame IDs and RTP timestamp in |frame|,
// and also copies the data from all packets into the data field in |frame|.
// Returns true if the frame was complete; false if incomplete and |frame|
@@ -32,13 +34,14 @@ class FrameBuffer {
bool AssembleEncodedFrame(EncodedFrame* frame) const;
bool is_key_frame() const { return is_key_frame_; }
-
uint32 last_referenced_frame_id() const { return last_referenced_frame_id_; }
+ uint32 frame_id() const { return frame_id_; }
private:
uint32 frame_id_;
uint16 max_packet_id_;
uint16 num_packets_received_;
+ uint16 max_seen_packet_id_;
uint16 new_playout_delay_ms_;
bool is_key_frame_;
size_t total_data_size_;
diff --git a/media/cast/net/rtp/frame_id_map.cc b/media/cast/net/rtp/frame_id_map.cc
deleted file mode 100644
index f0b433c2fb..0000000000
--- a/media/cast/net/rtp/frame_id_map.cc
+++ /dev/null
@@ -1,247 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/cast/net/rtp/frame_id_map.h"
-
-#include "base/logging.h"
-#include "media/cast/net/rtp/rtp_receiver_defines.h"
-
-namespace media {
-namespace cast {
-
-FrameInfo::FrameInfo(uint32 frame_id,
- uint32 referenced_frame_id,
- uint16 max_packet_id,
- bool key_frame)
- : is_key_frame_(key_frame),
- frame_id_(frame_id),
- referenced_frame_id_(referenced_frame_id),
- max_received_packet_id_(0) {
- // Create the set with all packets missing.
- for (uint16 i = 0; i <= max_packet_id; i++) {
- missing_packets_.insert(i);
- }
-}
-
-FrameInfo::~FrameInfo() {}
-
-PacketType FrameInfo::InsertPacket(uint16 packet_id) {
- if (missing_packets_.find(packet_id) == missing_packets_.end()) {
- return kDuplicatePacket;
- }
- // Update the last received packet id.
- if (IsNewerPacketId(packet_id, max_received_packet_id_)) {
- max_received_packet_id_ = packet_id;
- }
- missing_packets_.erase(packet_id);
- return missing_packets_.empty() ? kNewPacketCompletingFrame : kNewPacket;
-}
-
-bool FrameInfo::Complete() const { return missing_packets_.empty(); }
-
-void FrameInfo::GetMissingPackets(bool newest_frame,
- PacketIdSet* missing_packets) const {
- if (newest_frame) {
- // Missing packets capped by max_received_packet_id_.
- PacketIdSet::const_iterator it_after_last_received =
- missing_packets_.lower_bound(max_received_packet_id_);
- missing_packets->insert(missing_packets_.begin(), it_after_last_received);
- } else {
- missing_packets->insert(missing_packets_.begin(), missing_packets_.end());
- }
-}
-
-FrameIdMap::FrameIdMap()
- : waiting_for_key_(true),
- last_released_frame_(kStartFrameId),
- newest_frame_id_(kStartFrameId) {}
-
-FrameIdMap::~FrameIdMap() {}
-
-PacketType FrameIdMap::InsertPacket(const RtpCastHeader& rtp_header) {
- uint32 frame_id = rtp_header.frame_id;
- uint32 reference_frame_id;
- reference_frame_id = rtp_header.reference_frame_id;
-
- if (rtp_header.is_key_frame && waiting_for_key_) {
- last_released_frame_ = static_cast<uint32>(frame_id - 1);
- waiting_for_key_ = false;
- }
-
- VLOG(3) << "InsertPacket frame:" << frame_id
- << " packet:" << static_cast<int>(rtp_header.packet_id)
- << " max packet:" << static_cast<int>(rtp_header.max_packet_id);
-
- if (IsOlderFrameId(frame_id, last_released_frame_) && !waiting_for_key_) {
- return kTooOldPacket;
- }
-
- // Update the last received frame id.
- if (IsNewerFrameId(frame_id, newest_frame_id_)) {
- newest_frame_id_ = frame_id;
- }
-
- // Does this packet belong to a new frame?
- FrameMap::iterator it = frame_map_.find(frame_id);
- PacketType packet_type;
- if (it == frame_map_.end()) {
- // New frame.
- linked_ptr<FrameInfo> frame_info(new FrameInfo(frame_id,
- reference_frame_id,
- rtp_header.max_packet_id,
- rtp_header.is_key_frame));
- std::pair<FrameMap::iterator, bool> retval =
- frame_map_.insert(std::make_pair(frame_id, frame_info));
-
- packet_type = retval.first->second->InsertPacket(rtp_header.packet_id);
- } else {
- // Insert packet to existing frame.
- packet_type = it->second->InsertPacket(rtp_header.packet_id);
- }
- return packet_type;
-}
-
-void FrameIdMap::RemoveOldFrames(uint32 frame_id) {
- FrameMap::iterator it = frame_map_.begin();
-
- while (it != frame_map_.end()) {
- if (IsNewerFrameId(it->first, frame_id)) {
- ++it;
- } else {
- // Older or equal; erase.
- frame_map_.erase(it++);
- }
- }
- last_released_frame_ = frame_id;
-}
-
-void FrameIdMap::Clear() {
- frame_map_.clear();
- waiting_for_key_ = true;
- last_released_frame_ = kStartFrameId;
- newest_frame_id_ = kStartFrameId;
-}
-
-uint32 FrameIdMap::NewestFrameId() const { return newest_frame_id_; }
-
-bool FrameIdMap::NextContinuousFrame(uint32* frame_id) const {
- FrameMap::const_iterator it;
-
- for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
- if (it->second->Complete() && ContinuousFrame(it->second.get())) {
- *frame_id = it->first;
- return true;
- }
- }
- return false;
-}
-
-bool FrameIdMap::HaveMultipleDecodableFrames() const {
- // Find the oldest decodable frame.
- FrameMap::const_iterator it;
- bool found_one = false;
- for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
- if (it->second->Complete() && DecodableFrame(it->second.get())) {
- if (found_one) {
- return true;
- } else {
- found_one = true;
- }
- }
- }
- return false;
-}
-
-uint32 FrameIdMap::LastContinuousFrame() const {
- uint32 last_continuous_frame_id = last_released_frame_;
- uint32 next_expected_frame = last_released_frame_;
-
- FrameMap::const_iterator it;
-
- do {
- next_expected_frame++;
- it = frame_map_.find(next_expected_frame);
- if (it == frame_map_.end())
- break;
- if (!it->second->Complete())
- break;
-
- // We found the next continuous frame.
- last_continuous_frame_id = it->first;
- } while (next_expected_frame != newest_frame_id_);
- return last_continuous_frame_id;
-}
-
-bool FrameIdMap::NextFrameAllowingSkippingFrames(uint32* frame_id) const {
- // Find the oldest decodable frame.
- FrameMap::const_iterator it_best_match = frame_map_.end();
- FrameMap::const_iterator it;
- for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
- if (it->second->Complete() && DecodableFrame(it->second.get())) {
- if (it_best_match == frame_map_.end() ||
- IsOlderFrameId(it->first, it_best_match->first)) {
- it_best_match = it;
- }
- }
- }
- if (it_best_match == frame_map_.end())
- return false;
-
- *frame_id = it_best_match->first;
- return true;
-}
-
-bool FrameIdMap::Empty() const { return frame_map_.empty(); }
-
-int FrameIdMap::NumberOfCompleteFrames() const {
- int count = 0;
- FrameMap::const_iterator it;
- for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
- if (it->second->Complete()) {
- ++count;
- }
- }
- return count;
-}
-
-bool FrameIdMap::FrameExists(uint32 frame_id) const {
- return frame_map_.end() != frame_map_.find(frame_id);
-}
-
-void FrameIdMap::GetMissingPackets(uint32 frame_id,
- bool last_frame,
- PacketIdSet* missing_packets) const {
- FrameMap::const_iterator it = frame_map_.find(frame_id);
- if (it == frame_map_.end())
- return;
-
- it->second->GetMissingPackets(last_frame, missing_packets);
-}
-
-bool FrameIdMap::ContinuousFrame(FrameInfo* frame) const {
- DCHECK(frame);
- if (waiting_for_key_ && !frame->is_key_frame())
- return false;
- return static_cast<uint32>(last_released_frame_ + 1) == frame->frame_id();
-}
-
-bool FrameIdMap::DecodableFrame(FrameInfo* frame) const {
- if (frame->is_key_frame())
- return true;
- if (waiting_for_key_ && !frame->is_key_frame())
- return false;
- // Self-reference?
- if (frame->referenced_frame_id() == frame->frame_id())
- return true;
-
- // Current frame is not necessarily referencing the last frame.
- // Do we have the reference frame?
- if (IsOlderFrameId(frame->referenced_frame_id(), last_released_frame_)) {
- return true;
- }
- return frame->referenced_frame_id() == last_released_frame_;
-}
-
-} // namespace cast
-} // namespace media
diff --git a/media/cast/net/rtp/frame_id_map.h b/media/cast/net/rtp/frame_id_map.h
deleted file mode 100644
index 9c1b674ff4..0000000000
--- a/media/cast/net/rtp/frame_id_map.h
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAST_FRAMER_FRAME_ID_MAP_H_
-#define MEDIA_CAST_FRAMER_FRAME_ID_MAP_H_
-
-#include <map>
-#include <set>
-
-#include "base/memory/linked_ptr.h"
-#include "base/memory/scoped_ptr.h"
-#include "media/cast/cast_config.h"
-#include "media/cast/net/rtcp/rtcp_defines.h"
-#include "media/cast/net/rtp/rtp_receiver_defines.h"
-
-namespace media {
-namespace cast {
-
-class FrameInfo {
- public:
- FrameInfo(uint32 frame_id,
- uint32 referenced_frame_id,
- uint16 max_packet_id,
- bool key_frame);
- ~FrameInfo();
-
- PacketType InsertPacket(uint16 packet_id);
- bool Complete() const;
- void GetMissingPackets(bool newest_frame, PacketIdSet* missing_packets) const;
-
- bool is_key_frame() const { return is_key_frame_; }
- uint32 frame_id() const { return frame_id_; }
- uint32 referenced_frame_id() const { return referenced_frame_id_; }
-
- private:
- const bool is_key_frame_;
- const uint32 frame_id_;
- const uint32 referenced_frame_id_;
-
- uint16 max_received_packet_id_;
- PacketIdSet missing_packets_;
-
- DISALLOW_COPY_AND_ASSIGN(FrameInfo);
-};
-
-typedef std::map<uint32, linked_ptr<FrameInfo> > FrameMap;
-
-class FrameIdMap {
- public:
- FrameIdMap();
- ~FrameIdMap();
-
- PacketType InsertPacket(const RtpCastHeader& rtp_header);
-
- bool Empty() const;
- bool FrameExists(uint32 frame_id) const;
- uint32 NewestFrameId() const;
-
- void RemoveOldFrames(uint32 frame_id);
- void Clear();
-
- // Identifies the next frame to be released (rendered).
- bool NextContinuousFrame(uint32* frame_id) const;
- uint32 LastContinuousFrame() const;
-
- bool NextFrameAllowingSkippingFrames(uint32* frame_id) const;
- bool HaveMultipleDecodableFrames() const;
-
- int NumberOfCompleteFrames() const;
- void GetMissingPackets(uint32 frame_id,
- bool last_frame,
- PacketIdSet* missing_packets) const;
-
- private:
- bool ContinuousFrame(FrameInfo* frame) const;
- bool DecodableFrame(FrameInfo* frame) const;
-
- FrameMap frame_map_;
- bool waiting_for_key_;
- uint32 last_released_frame_;
- uint32 newest_frame_id_;
-
- DISALLOW_COPY_AND_ASSIGN(FrameIdMap);
-};
-
-} // namespace cast
-} // namespace media
-
-#endif // MEDIA_CAST_FRAMER_FRAME_ID_MAP_H_
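
The removed FrameIdMap, and the Framer code that absorbs it below, both lean on wraparound-safe comparisons of 32-bit frame ids (IsOlderFrameId / IsNewerFrameId). Those helpers are defined outside this diff, so the snippet below is only a minimal sketch of that style of serial-number arithmetic, not the actual Chromium definitions:

    #include <cstdint>

    // Treat frame ids as serial numbers modulo 2^32: |id| is "newer" than
    // |prev| when the signed distance from |prev| to |id| is positive, so the
    // comparison keeps working when the counter wraps from 0xffffffff to 0.
    // (The result is unspecified when the two ids are exactly 2^31 apart.)
    inline bool IsNewer(uint32_t id, uint32_t prev) {
      return static_cast<int32_t>(id - prev) > 0;
    }

    inline bool IsOlder(uint32_t id, uint32_t prev) {
      return static_cast<int32_t>(id - prev) < 0;
    }

Under this convention, if kStartFrameId is the maximum uint32 value, frame id 0 still compares as newer than the start value, which is what lets a freshly reset Framer accept the first key frame it sees.
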
diff --git a/media/cast/net/rtp/framer.cc b/media/cast/net/rtp/framer.cc
index c94dc0c712..e807e94978 100644
--- a/media/cast/net/rtp/framer.cc
+++ b/media/cast/net/rtp/framer.cc
@@ -20,10 +20,13 @@ Framer::Framer(base::TickClock* clock,
cast_msg_builder_(
new CastMessageBuilder(clock,
incoming_payload_feedback,
- &frame_id_map_,
+ this,
ssrc,
decoder_faster_than_max_frame_rate,
- max_unacked_frames)) {
+ max_unacked_frames)),
+ waiting_for_key_(true),
+ last_released_frame_(kStartFrameId),
+ newest_frame_id_(kStartFrameId) {
DCHECK(incoming_payload_feedback) << "Invalid argument";
}
@@ -34,42 +37,58 @@ bool Framer::InsertPacket(const uint8* payload_data,
const RtpCastHeader& rtp_header,
bool* duplicate) {
*duplicate = false;
- PacketType packet_type = frame_id_map_.InsertPacket(rtp_header);
- if (packet_type == kTooOldPacket) {
- return false;
+ uint32 frame_id = rtp_header.frame_id;
+
+ if (rtp_header.is_key_frame && waiting_for_key_) {
+ last_released_frame_ = static_cast<uint32>(frame_id - 1);
+ waiting_for_key_ = false;
}
- if (packet_type == kDuplicatePacket) {
- VLOG(3) << "Packet already received, ignored: frame "
- << static_cast<int>(rtp_header.frame_id) << ", packet "
- << rtp_header.packet_id;
- *duplicate = true;
+
+ VLOG(1) << "InsertPacket frame:" << frame_id
+ << " packet:" << static_cast<int>(rtp_header.packet_id)
+ << " max packet:" << static_cast<int>(rtp_header.max_packet_id);
+
+ if (IsOlderFrameId(frame_id, last_released_frame_) && !waiting_for_key_) {
+ // Packet is too old.
return false;
}
+ // Update the last received frame id.
+ if (IsNewerFrameId(frame_id, newest_frame_id_)) {
+ newest_frame_id_ = frame_id;
+ }
+
// Does this packet belong to a new frame?
- FrameList::iterator it = frames_.find(rtp_header.frame_id);
+ FrameList::iterator it = frames_.find(frame_id);
if (it == frames_.end()) {
// New frame.
- linked_ptr<FrameBuffer> frame_buffer(new FrameBuffer());
- frame_buffer->InsertPacket(payload_data, payload_size, rtp_header);
- frames_.insert(std::make_pair(rtp_header.frame_id, frame_buffer));
- } else {
- // Insert packet to existing frame buffer.
- it->second->InsertPacket(payload_data, payload_size, rtp_header);
+ linked_ptr<FrameBuffer> frame_info(new FrameBuffer);
+ std::pair<FrameList::iterator, bool> retval =
+ frames_.insert(std::make_pair(frame_id, frame_info));
+ it = retval.first;
+ }
+
+ // Insert packet.
+ if (!it->second->InsertPacket(payload_data, payload_size, rtp_header)) {
+ VLOG(3) << "Packet already received, ignored: frame "
+ << static_cast<int>(rtp_header.frame_id) << ", packet "
+ << rtp_header.packet_id;
+ *duplicate = true;
+ return false;
}
- return packet_type == kNewPacketCompletingFrame;
+ return it->second->Complete();
}
// This does not release the frame.
bool Framer::GetEncodedFrame(EncodedFrame* frame,
bool* next_frame,
bool* have_multiple_decodable_frames) {
- *have_multiple_decodable_frames = frame_id_map_.HaveMultipleDecodableFrames();
+ *have_multiple_decodable_frames = HaveMultipleDecodableFrames();
uint32 frame_id;
// Find frame id.
- if (frame_id_map_.NextContinuousFrame(&frame_id)) {
+ if (NextContinuousFrame(&frame_id)) {
// We have our next frame.
*next_frame = true;
} else {
@@ -77,17 +96,12 @@ bool Framer::GetEncodedFrame(EncodedFrame* frame,
if (!decoder_faster_than_max_frame_rate_)
return false;
- if (!frame_id_map_.NextFrameAllowingSkippingFrames(&frame_id)) {
+ if (!NextFrameAllowingSkippingFrames(&frame_id)) {
return false;
}
*next_frame = false;
}
- if (*next_frame) {
- VLOG(2) << "ACK frame " << frame_id;
- cast_msg_builder_->CompleteFrameReceived(frame_id);
- }
-
ConstFrameIterator it = frames_.find(frame_id);
DCHECK(it != frames_.end());
if (it == frames_.end())
@@ -96,14 +110,21 @@ bool Framer::GetEncodedFrame(EncodedFrame* frame,
return it->second->AssembleEncodedFrame(frame);
}
+void Framer::AckFrame(uint32 frame_id) {
+ VLOG(2) << "ACK frame " << frame_id;
+ cast_msg_builder_->CompleteFrameReceived(frame_id);
+}
+
void Framer::Reset() {
- frame_id_map_.Clear();
+ waiting_for_key_ = true;
+ last_released_frame_ = kStartFrameId;
+ newest_frame_id_ = kStartFrameId;
frames_.clear();
cast_msg_builder_->Reset();
}
void Framer::ReleaseFrame(uint32 frame_id) {
- frame_id_map_.RemoveOldFrames(frame_id);
+ RemoveOldFrames(frame_id);
frames_.erase(frame_id);
// We have a frame - remove all frames with lower frame id.
@@ -128,5 +149,140 @@ bool Framer::TimeToSendNextCastMessage(base::TimeTicks* time_to_send) {
void Framer::SendCastMessage() { cast_msg_builder_->UpdateCastMessage(); }
+void Framer::RemoveOldFrames(uint32 frame_id) {
+ FrameList::iterator it = frames_.begin();
+
+ while (it != frames_.end()) {
+ if (IsNewerFrameId(it->first, frame_id)) {
+ ++it;
+ } else {
+ // Older or equal; erase.
+ frames_.erase(it++);
+ }
+ }
+ last_released_frame_ = frame_id;
+}
+
+uint32 Framer::NewestFrameId() const { return newest_frame_id_; }
+
+bool Framer::NextContinuousFrame(uint32* frame_id) const {
+ FrameList::const_iterator it;
+
+ for (it = frames_.begin(); it != frames_.end(); ++it) {
+ if (it->second->Complete() && ContinuousFrame(it->second.get())) {
+ *frame_id = it->first;
+ return true;
+ }
+ }
+ return false;
+}
+
+bool Framer::HaveMultipleDecodableFrames() const {
+ // Find the oldest decodable frame.
+ FrameList::const_iterator it;
+ bool found_one = false;
+ for (it = frames_.begin(); it != frames_.end(); ++it) {
+ if (it->second->Complete() && DecodableFrame(it->second.get())) {
+ if (found_one) {
+ return true;
+ } else {
+ found_one = true;
+ }
+ }
+ }
+ return false;
+}
+
+uint32 Framer::LastContinuousFrame() const {
+ uint32 last_continuous_frame_id = last_released_frame_;
+ uint32 next_expected_frame = last_released_frame_;
+
+ FrameList::const_iterator it;
+
+ do {
+ next_expected_frame++;
+ it = frames_.find(next_expected_frame);
+ if (it == frames_.end())
+ break;
+ if (!it->second->Complete())
+ break;
+
+ // We found the next continuous frame.
+ last_continuous_frame_id = it->first;
+ } while (next_expected_frame != newest_frame_id_);
+ return last_continuous_frame_id;
+}
+
+bool Framer::NextFrameAllowingSkippingFrames(uint32* frame_id) const {
+ // Find the oldest decodable frame.
+ FrameList::const_iterator it_best_match = frames_.end();
+ FrameList::const_iterator it;
+ for (it = frames_.begin(); it != frames_.end(); ++it) {
+ if (it->second->Complete() && DecodableFrame(it->second.get())) {
+ if (it_best_match == frames_.end() ||
+ IsOlderFrameId(it->first, it_best_match->first)) {
+ it_best_match = it;
+ }
+ }
+ }
+ if (it_best_match == frames_.end())
+ return false;
+
+ *frame_id = it_best_match->first;
+ return true;
+}
+
+bool Framer::Empty() const { return frames_.empty(); }
+
+int Framer::NumberOfCompleteFrames() const {
+ int count = 0;
+ FrameList::const_iterator it;
+ for (it = frames_.begin(); it != frames_.end(); ++it) {
+ if (it->second->Complete()) {
+ ++count;
+ }
+ }
+ return count;
+}
+
+bool Framer::FrameExists(uint32 frame_id) const {
+ return frames_.end() != frames_.find(frame_id);
+}
+
+void Framer::GetMissingPackets(uint32 frame_id,
+ bool last_frame,
+ PacketIdSet* missing_packets) const {
+ FrameList::const_iterator it = frames_.find(frame_id);
+ if (it == frames_.end())
+ return;
+
+ it->second->GetMissingPackets(last_frame, missing_packets);
+}
+
+bool Framer::ContinuousFrame(FrameBuffer* frame) const {
+ DCHECK(frame);
+ if (waiting_for_key_ && !frame->is_key_frame())
+ return false;
+ return static_cast<uint32>(last_released_frame_ + 1) == frame->frame_id();
+}
+
+bool Framer::DecodableFrame(FrameBuffer* frame) const {
+ if (frame->is_key_frame())
+ return true;
+ if (waiting_for_key_ && !frame->is_key_frame())
+ return false;
+ // Self-reference?
+ if (frame->last_referenced_frame_id() == frame->frame_id())
+ return true;
+
+ // Current frame is not necessarily referencing the last frame.
+ // Do we have the reference frame?
+ if (IsOlderFrameId(frame->last_referenced_frame_id(), last_released_frame_)) {
+ return true;
+ }
+ return frame->last_referenced_frame_id() == last_released_frame_;
+}
+
} // namespace cast
} // namespace media
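
The frame-selection logic that Framer takes over above prefers the next strictly continuous frame, and only when the decoder can outrun the maximum frame rate does it fall back to the oldest complete frame that is still decodable. A condensed, self-contained sketch of that decision (hypothetical Frame struct, not the real FrameBuffer API) might look like:

    #include <cstdint>
    #include <map>

    struct Frame {
      bool complete;
      bool key_frame;
      uint32_t id;
      uint32_t referenced_id;
    };

    // First preference: the frame directly following |last_released| (no gap).
    // Otherwise, if skipping is allowed, take the oldest complete frame that is
    // a key frame or whose reference has already been released.
    // NOTE: the real Framer also tracks a waiting-for-key state and orders ids
    // with wraparound-aware helpers; plain map order is used here for brevity.
    bool PickNextFrame(const std::map<uint32_t, Frame>& frames,
                       uint32_t last_released,
                       bool allow_skipping,
                       uint32_t* out) {
      for (const auto& entry : frames) {
        const Frame& f = entry.second;
        if (f.complete && f.id == last_released + 1) {
          *out = f.id;
          return true;
        }
      }
      if (!allow_skipping)
        return false;
      for (const auto& entry : frames) {
        const Frame& f = entry.second;
        const bool decodable =
            f.key_frame || f.referenced_id == last_released ||
            static_cast<int32_t>(f.referenced_id - last_released) < 0;
        if (f.complete && decodable) {
          *out = f.id;
          return true;
        }
      }
      return false;
    }
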
diff --git a/media/cast/net/rtp/framer.h b/media/cast/net/rtp/framer.h
index cf70ef191e..95c1a0eb62 100644
--- a/media/cast/net/rtp/framer.h
+++ b/media/cast/net/rtp/framer.h
@@ -15,7 +15,6 @@
#include "media/cast/net/rtcp/rtcp.h"
#include "media/cast/net/rtp/cast_message_builder.h"
#include "media/cast/net/rtp/frame_buffer.h"
-#include "media/cast/net/rtp/frame_id_map.h"
#include "media/cast/net/rtp/rtp_receiver_defines.h"
namespace media {
@@ -49,6 +48,9 @@ class Framer {
bool* next_frame,
bool* have_multiple_complete_frames);
+ // TODO(hubbe): Move this elsewhere.
+ void AckFrame(uint32 frame_id);
+
void ReleaseFrame(uint32 frame_id);
// Reset framer state to original state and flush all pending buffers.
@@ -56,12 +58,34 @@ class Framer {
bool TimeToSendNextCastMessage(base::TimeTicks* time_to_send);
void SendCastMessage();
+ bool Empty() const;
+ bool FrameExists(uint32 frame_id) const;
+ uint32 NewestFrameId() const;
+
+ void RemoveOldFrames(uint32 frame_id);
+
+ // Identifies the next frame to be released (rendered).
+ bool NextContinuousFrame(uint32* frame_id) const;
+ uint32 LastContinuousFrame() const;
+
+ bool NextFrameAllowingSkippingFrames(uint32* frame_id) const;
+ bool HaveMultipleDecodableFrames() const;
+
+ int NumberOfCompleteFrames() const;
+ void GetMissingPackets(uint32 frame_id,
+ bool last_frame,
+ PacketIdSet* missing_packets) const;
+
private:
+ bool ContinuousFrame(FrameBuffer* frame) const;
+ bool DecodableFrame(FrameBuffer* frame) const;
+
const bool decoder_faster_than_max_frame_rate_;
FrameList frames_;
- FrameIdMap frame_id_map_;
-
scoped_ptr<CastMessageBuilder> cast_msg_builder_;
+ bool waiting_for_key_;
+ uint32 last_released_frame_;
+ uint32 newest_frame_id_;
DISALLOW_COPY_AND_ASSIGN(Framer);
};
diff --git a/media/cast/net/rtp/packet_storage.cc b/media/cast/net/rtp/packet_storage.cc
index 59ac9ce44e..0408eb1823 100644
--- a/media/cast/net/rtp/packet_storage.cc
+++ b/media/cast/net/rtp/packet_storage.cc
@@ -4,47 +4,59 @@
#include "media/cast/net/rtp/packet_storage.h"
-#include <string>
-
#include "base/logging.h"
#include "media/cast/cast_defines.h"
namespace media {
namespace cast {
-PacketStorage::PacketStorage(size_t stored_frames)
- : max_stored_frames_(stored_frames),
- first_frame_id_in_list_(0),
- last_frame_id_in_list_(0) {
+PacketStorage::PacketStorage()
+ : first_frame_id_in_list_(0),
+ zombie_count_(0) {
}
PacketStorage::~PacketStorage() {
}
-bool PacketStorage::IsValid() const {
- return max_stored_frames_ > 0 &&
- static_cast<int>(max_stored_frames_) <= kMaxUnackedFrames;
-}
-
size_t PacketStorage::GetNumberOfStoredFrames() const {
- return frames_.size();
+ return frames_.size() - zombie_count_;
}
void PacketStorage::StoreFrame(uint32 frame_id,
const SendPacketVector& packets) {
+ if (packets.empty()) {
+ NOTREACHED();
+ return;
+ }
+
if (frames_.empty()) {
first_frame_id_in_list_ = frame_id;
} else {
// Make sure frame IDs are consecutive.
- DCHECK_EQ(last_frame_id_in_list_ + 1, frame_id);
+ DCHECK_EQ(first_frame_id_in_list_ + static_cast<uint32>(frames_.size()),
+ frame_id);
+ // Make sure we aren't being asked to store more frames than the system's
+ // design limit.
+ DCHECK_LT(frames_.size(), static_cast<size_t>(kMaxUnackedFrames));
}
// Save new frame to the end of the list.
- last_frame_id_in_list_ = frame_id;
frames_.push_back(packets);
+}
+
+void PacketStorage::ReleaseFrame(uint32 frame_id) {
+ const uint32 offset = frame_id - first_frame_id_in_list_;
+ if (static_cast<int32>(offset) < 0 || offset >= frames_.size() ||
+ frames_[offset].empty()) {
+ return;
+ }
+
+ frames_[offset].clear();
+ ++zombie_count_;
- // Evict the oldest frame if the list is too long.
- if (frames_.size() > max_stored_frames_) {
+ while (!frames_.empty() && frames_.front().empty()) {
+ DCHECK_GT(zombie_count_, 0u);
+ --zombie_count_;
frames_.pop_front();
++first_frame_id_in_list_;
}
@@ -57,7 +69,8 @@ const SendPacketVector* PacketStorage::GetFrame8(uint8 frame_id_8bits) const {
index_8bits = frame_id_8bits - index_8bits;
if (index_8bits >= frames_.size())
return NULL;
- return &(frames_[index_8bits]);
+ const SendPacketVector& packets = frames_[index_8bits];
+ return packets.empty() ? NULL : &packets;
}
} // namespace cast
diff --git a/media/cast/net/rtp/packet_storage.h b/media/cast/net/rtp/packet_storage.h
index 9330a6a0ae..e086f8b29b 100644
--- a/media/cast/net/rtp/packet_storage.h
+++ b/media/cast/net/rtp/packet_storage.h
@@ -6,37 +6,24 @@
#define MEDIA_CAST_NET_RTP_SENDER_PACKET_STORAGE_PACKET_STORAGE_H_
#include <deque>
-#include <list>
-#include <map>
-#include <vector>
#include "base/basictypes.h"
-#include "base/memory/linked_ptr.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/time/tick_clock.h"
-#include "base/time/time.h"
-#include "media/cast/net/cast_transport_config.h"
-#include "media/cast/net/cast_transport_defines.h"
#include "media/cast/net/pacing/paced_sender.h"
namespace media {
namespace cast {
-// Stores a list of frames. Each frame consists a list of packets.
-typedef std::deque<SendPacketVector> FrameQueue;
-
class PacketStorage {
public:
- explicit PacketStorage(size_t stored_frames);
+ PacketStorage();
virtual ~PacketStorage();
- // Returns true if this class is configured correctly.
- // (stored frames > 0 && stored_frames < kMaxStoredFrames)
- bool IsValid() const;
-
// Store all of the packets for a frame.
void StoreFrame(uint32 frame_id, const SendPacketVector& packets);
+ // Release all of the packets for a frame.
+ void ReleaseFrame(uint32 frame_id);
+
// Returns a list of packets for a frame indexed by a 8-bits ID.
// It is the lowest 8 bits of a frame ID.
// Returns NULL if the frame cannot be found.
@@ -46,10 +33,12 @@ class PacketStorage {
size_t GetNumberOfStoredFrames() const;
private:
- const size_t max_stored_frames_;
- FrameQueue frames_;
+ std::deque<SendPacketVector> frames_;
uint32 first_frame_id_in_list_;
- uint32 last_frame_id_in_list_;
+
+ // The number of frames whose packets have been released but whose entries
+ // in the |frames_| queue have not yet been popped.
+ size_t zombie_count_;
DISALLOW_COPY_AND_ASSIGN(PacketStorage);
};
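
PacketStorage no longer evicts by capacity; instead a released frame's packet list is cleared in place (a "zombie" entry) and the front of the deque is popped only once the oldest entries are all zombies, so offsets computed from first_frame_id_in_list_ stay valid. A simplified, standalone sketch of that bookkeeping (plain std:: types, not the Chromium SendPacketVector) follows:

    #include <cstddef>
    #include <cstdint>
    #include <deque>
    #include <utility>
    #include <vector>

    class ReleaseQueue {
     public:
      // Frame ids are assumed consecutive and packet lists non-empty.
      void StoreFrame(uint32_t frame_id, std::vector<int> packets) {
        if (frames_.empty())
          first_frame_id_ = frame_id;
        frames_.push_back(std::move(packets));
      }

      void ReleaseFrame(uint32_t frame_id) {
        const uint32_t offset = frame_id - first_frame_id_;
        if (offset >= frames_.size() || frames_[offset].empty())
          return;  // Unknown id, or already released.
        frames_[offset].clear();  // Mark the entry as a zombie.
        ++zombie_count_;
        // Pop leading zombies so offsets stay valid for the live frames.
        while (!frames_.empty() && frames_.front().empty()) {
          --zombie_count_;
          frames_.pop_front();
          ++first_frame_id_;
        }
      }

      size_t stored_frames() const { return frames_.size() - zombie_count_; }

     private:
      std::deque<std::vector<int>> frames_;
      uint32_t first_frame_id_ = 0;
      size_t zombie_count_ = 0;
    };
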
diff --git a/media/cast/net/rtp/packet_storage_unittest.cc b/media/cast/net/rtp/packet_storage_unittest.cc
index 5e9393da5d..ddebb7ea57 100644
--- a/media/cast/net/rtp/packet_storage_unittest.cc
+++ b/media/cast/net/rtp/packet_storage_unittest.cc
@@ -6,16 +6,18 @@
#include <stdint.h>
+#include <algorithm>
#include <vector>
#include "base/test/simple_test_tick_clock.h"
#include "base/time/time.h"
+#include "media/cast/cast_defines.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace media {
namespace cast {
-static size_t kStoredFrames = 10;
+static const size_t kStoredFrames = 10;
// Generate |number_of_frames| and store into |*storage|.
// First frame has 1 packet, second frame has 2 packets, etc.
@@ -41,20 +43,22 @@ static void StoreFrames(size_t number_of_frames,
}
TEST(PacketStorageTest, NumberOfStoredFrames) {
- PacketStorage storage(kStoredFrames);
+ PacketStorage storage;
uint32 frame_id = 0;
frame_id = ~frame_id; // The maximum value of uint32.
- StoreFrames(200, frame_id, &storage);
- EXPECT_EQ(kStoredFrames, storage.GetNumberOfStoredFrames());
+ StoreFrames(kMaxUnackedFrames / 2, frame_id, &storage);
+ EXPECT_EQ(static_cast<size_t>(kMaxUnackedFrames / 2),
+ storage.GetNumberOfStoredFrames());
}
TEST(PacketStorageTest, GetFrameWrapAround8bits) {
- PacketStorage storage(kStoredFrames);
+ PacketStorage storage;
const uint32 kFirstFrameId = 250;
StoreFrames(kStoredFrames, kFirstFrameId, &storage);
- EXPECT_EQ(kStoredFrames, storage.GetNumberOfStoredFrames());
+ EXPECT_EQ(std::min<size_t>(kMaxUnackedFrames, kStoredFrames),
+ storage.GetNumberOfStoredFrames());
// Expect we get the correct frames by looking at the number of
// packets.
@@ -67,12 +71,13 @@ TEST(PacketStorageTest, GetFrameWrapAround8bits) {
}
TEST(PacketStorageTest, GetFrameWrapAround32bits) {
- PacketStorage storage(kStoredFrames);
+ PacketStorage storage;
// First frame ID is close to the maximum value of uint32.
uint32 first_frame_id = 0xffffffff - 5;
StoreFrames(kStoredFrames, first_frame_id, &storage);
- EXPECT_EQ(kStoredFrames, storage.GetNumberOfStoredFrames());
+ EXPECT_EQ(std::min<size_t>(kMaxUnackedFrames, kStoredFrames),
+ storage.GetNumberOfStoredFrames());
// Expect we get the correct frames by looking at the number of
// packets.
@@ -84,29 +89,38 @@ TEST(PacketStorageTest, GetFrameWrapAround32bits) {
}
}
-TEST(PacketStorageTest, GetFrameTooOld) {
- PacketStorage storage(kStoredFrames);
+TEST(PacketStorageTest, FramesReleased) {
+ PacketStorage storage;
- // First frame ID is close to the maximum value of uint32.
- uint32 first_frame_id = 0xffffffff - 5;
+ const uint32 kFirstFrameId = 0;
+ StoreFrames(5, kFirstFrameId, &storage);
+ EXPECT_EQ(std::min<size_t>(kMaxUnackedFrames, 5),
+ storage.GetNumberOfStoredFrames());
- // Store two times the capacity.
- StoreFrames(2 * kStoredFrames, first_frame_id, &storage);
- EXPECT_EQ(kStoredFrames, storage.GetNumberOfStoredFrames());
-
- uint32 frame_id = first_frame_id;
- // Old frames are evicted.
- for (size_t i = 0; i < kStoredFrames; ++i) {
- EXPECT_FALSE(storage.GetFrame8(frame_id));
- ++frame_id;
- }
- // Check recent frames are there.
- for (size_t i = 0; i < kStoredFrames; ++i) {
- ASSERT_TRUE(storage.GetFrame8(frame_id));
- EXPECT_EQ(kStoredFrames + i + 1,
- storage.GetFrame8(frame_id)->size());
- ++frame_id;
+ for (uint32 frame_id = kFirstFrameId; frame_id < kFirstFrameId + 5;
+ ++frame_id) {
+ EXPECT_TRUE(storage.GetFrame8(frame_id));
}
+
+ storage.ReleaseFrame(kFirstFrameId + 2);
+ EXPECT_EQ(4u, storage.GetNumberOfStoredFrames());
+ EXPECT_FALSE(storage.GetFrame8(kFirstFrameId + 2));
+
+ storage.ReleaseFrame(kFirstFrameId + 0);
+ EXPECT_EQ(3u, storage.GetNumberOfStoredFrames());
+ EXPECT_FALSE(storage.GetFrame8(kFirstFrameId + 0));
+
+ storage.ReleaseFrame(kFirstFrameId + 3);
+ EXPECT_EQ(2u, storage.GetNumberOfStoredFrames());
+ EXPECT_FALSE(storage.GetFrame8(kFirstFrameId + 3));
+
+ storage.ReleaseFrame(kFirstFrameId + 4);
+ EXPECT_EQ(1u, storage.GetNumberOfStoredFrames());
+ EXPECT_FALSE(storage.GetFrame8(kFirstFrameId + 4));
+
+ storage.ReleaseFrame(kFirstFrameId + 1);
+ EXPECT_EQ(0u, storage.GetNumberOfStoredFrames());
+ EXPECT_FALSE(storage.GetFrame8(kFirstFrameId + 1));
}
} // namespace cast
diff --git a/media/cast/net/rtp/rtp_packetizer_unittest.cc b/media/cast/net/rtp/rtp_packetizer_unittest.cc
index 5e0d388cab..6ac8ea17ee 100644
--- a/media/cast/net/rtp/rtp_packetizer_unittest.cc
+++ b/media/cast/net/rtp/rtp_packetizer_unittest.cc
@@ -102,16 +102,18 @@ class TestRtpPacketTransport : public PacketSender {
class RtpPacketizerTest : public ::testing::Test {
protected:
RtpPacketizerTest()
- : task_runner_(new test::FakeSingleThreadTaskRunner(&testing_clock_)),
- video_frame_(),
- packet_storage_(200) {
+ : task_runner_(new test::FakeSingleThreadTaskRunner(&testing_clock_)) {
config_.sequence_number = kSeqNum;
config_.ssrc = kSsrc;
config_.payload_type = kPayload;
config_.max_payload_length = kMaxPacketLength;
transport_.reset(new TestRtpPacketTransport(config_));
- pacer_.reset(new PacedSender(
- &testing_clock_, &logging_, transport_.get(), task_runner_));
+ pacer_.reset(new PacedSender(kTargetBurstSize,
+ kMaxBurstSize,
+ &testing_clock_,
+ &logging_,
+ transport_.get(),
+ task_runner_));
pacer_->RegisterVideoSsrc(config_.ssrc);
rtp_packetizer_.reset(new RtpPacketizer(
pacer_.get(), &packet_storage_, config_));
diff --git a/media/cast/net/rtp/rtp_sender.cc b/media/cast/net/rtp/rtp_sender.cc
index bf1200fd33..c0a7575711 100644
--- a/media/cast/net/rtp/rtp_sender.cc
+++ b/media/cast/net/rtp/rtp_sender.cc
@@ -41,25 +41,22 @@ RtpSender::RtpSender(
RtpSender::~RtpSender() {}
bool RtpSender::Initialize(const CastTransportRtpConfig& config) {
- storage_.reset(new PacketStorage(config.stored_frames));
- if (!storage_->IsValid()) {
- return false;
- }
config_.ssrc = config.ssrc;
config_.payload_type = config.rtp_payload_type;
- packetizer_.reset(new RtpPacketizer(transport_, storage_.get(), config_));
+ packetizer_.reset(new RtpPacketizer(transport_, &storage_, config_));
return true;
}
void RtpSender::SendFrame(const EncodedFrame& frame) {
DCHECK(packetizer_);
packetizer_->SendFrameAsPackets(frame);
+ LOG_IF(DFATAL, storage_.GetNumberOfStoredFrames() > kMaxUnackedFrames)
+ << "Possible bug: Frames are not being actively released from storage.";
}
void RtpSender::ResendPackets(
const MissingFramesAndPacketsMap& missing_frames_and_packets,
bool cancel_rtx_if_not_in_list, const DedupInfo& dedup_info) {
- DCHECK(storage_);
// Iterate over all frames in the list.
for (MissingFramesAndPacketsMap::const_iterator it =
missing_frames_and_packets.begin();
@@ -76,7 +73,7 @@ void RtpSender::ResendPackets(
bool resend_last = missing_packet_set.find(kRtcpCastLastPacket) !=
missing_packet_set.end();
- const SendPacketVector* stored_packets = storage_->GetFrame8(frame_id);
+ const SendPacketVector* stored_packets = storage_.GetFrame8(frame_id);
if (!stored_packets)
continue;
@@ -119,13 +116,14 @@ void RtpSender::ResendPackets(
void RtpSender::CancelSendingFrames(const std::vector<uint32>& frame_ids) {
for (std::vector<uint32>::const_iterator i = frame_ids.begin();
i != frame_ids.end(); ++i) {
- const SendPacketVector* stored_packets = storage_->GetFrame8(*i & 0xFF);
+ const SendPacketVector* stored_packets = storage_.GetFrame8(*i & 0xFF);
if (!stored_packets)
continue;
for (SendPacketVector::const_iterator j = stored_packets->begin();
j != stored_packets->end(); ++j) {
transport_->CancelSendingPacket(j->first);
}
+ storage_.ReleaseFrame(*i);
}
}
@@ -157,7 +155,7 @@ void RtpSender::UpdateSequenceNumber(Packet* packet) {
}
int64 RtpSender::GetLastByteSentForFrame(uint32 frame_id) {
- const SendPacketVector* stored_packets = storage_->GetFrame8(frame_id & 0xFF);
+ const SendPacketVector* stored_packets = storage_.GetFrame8(frame_id & 0xFF);
if (!stored_packets)
return 0;
PacketKey last_packet_key = stored_packets->rbegin()->first;
diff --git a/media/cast/net/rtp/rtp_sender.h b/media/cast/net/rtp/rtp_sender.h
index 2875114aa3..a2a5c3871d 100644
--- a/media/cast/net/rtp/rtp_sender.h
+++ b/media/cast/net/rtp/rtp_sender.h
@@ -71,8 +71,8 @@ class RtpSender {
base::TickClock* clock_; // Not owned by this class.
RtpPacketizerConfig config_;
+ PacketStorage storage_;
scoped_ptr<RtpPacketizer> packetizer_;
- scoped_ptr<PacketStorage> storage_;
PacedSender* const transport_;
scoped_refptr<base::SingleThreadTaskRunner> transport_task_runner_;
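
With storage owned by value, RtpSender is now responsible for calling ReleaseFrame() as frames are acked or cancelled; the LOG_IF(DFATAL) check in SendFrame() is what catches a caller that forgets. Reusing the ReleaseQueue sketch from the packet_storage notes above, the intended call pattern is roughly this (illustrative only, hypothetical function name):

    #include <cstdint>

    // Assumes the ReleaseQueue sketch shown earlier in these notes.
    void OnFrameNoLongerNeeded(ReleaseQueue* storage, uint32_t frame_id) {
      // Mirrors RtpSender::CancelSendingFrames(): once the receiver has the
      // frame (or sending is cancelled), its packets are dropped so storage
      // only tracks unacked frames.
      storage->ReleaseFrame(frame_id);
    }
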
diff --git a/media/cast/net/udp_transport.cc b/media/cast/net/udp_transport.cc
index d6ca677680..fa9be469ed 100644
--- a/media/cast/net/udp_transport.cc
+++ b/media/cast/net/udp_transport.cc
@@ -116,12 +116,12 @@ void UdpTransport::ReceiveNextPacket(int length_or_status) {
next_packet_.reset(new Packet(kMaxPacketSize));
recv_buf_ = new net::WrappedIOBuffer(
reinterpret_cast<char*>(&next_packet_->front()));
- length_or_status = udp_socket_->RecvFrom(
- recv_buf_,
- kMaxPacketSize,
- &recv_addr_,
- base::Bind(&UdpTransport::ReceiveNextPacket,
- weak_factory_.GetWeakPtr()));
+ length_or_status =
+ udp_socket_->RecvFrom(recv_buf_.get(),
+ kMaxPacketSize,
+ &recv_addr_,
+ base::Bind(&UdpTransport::ReceiveNextPacket,
+ weak_factory_.GetWeakPtr()));
if (length_or_status == net::ERR_IO_PENDING) {
receive_pending_ = true;
return;
@@ -196,11 +196,10 @@ bool UdpTransport::SendPacket(PacketRef packet, const base::Closure& cb) {
// If we called Connect() before we must call Write() instead of
// SendTo(). Otherwise on some platforms we might get
// ERR_SOCKET_IS_CONNECTED.
- result = udp_socket_->Write(buf,
- static_cast<int>(packet->data.size()),
- callback);
+ result = udp_socket_->Write(
+ buf.get(), static_cast<int>(packet->data.size()), callback);
} else if (!IsEmpty(remote_addr_)) {
- result = udp_socket_->SendTo(buf,
+ result = udp_socket_->SendTo(buf.get(),
static_cast<int>(packet->data.size()),
remote_addr_,
callback);
diff --git a/media/cast/receiver/audio_decoder.cc b/media/cast/receiver/audio_decoder.cc
index dac0a5e9a1..aeed137d14 100644
--- a/media/cast/receiver/audio_decoder.cc
+++ b/media/cast/receiver/audio_decoder.cc
@@ -220,7 +220,7 @@ AudioDecoder::AudioDecoder(
AudioDecoder::~AudioDecoder() {}
CastInitializationStatus AudioDecoder::InitializationResult() const {
- if (impl_)
+ if (impl_.get())
return impl_->InitializationResult();
return STATUS_UNSUPPORTED_AUDIO_CODEC;
}
@@ -230,7 +230,8 @@ void AudioDecoder::DecodeFrame(
const DecodeFrameCallback& callback) {
DCHECK(encoded_frame.get());
DCHECK(!callback.is_null());
- if (!impl_ || impl_->InitializationResult() != STATUS_AUDIO_INITIALIZED) {
+ if (!impl_.get() ||
+ impl_->InitializationResult() != STATUS_AUDIO_INITIALIZED) {
callback.Run(make_scoped_ptr<AudioBus>(NULL), false);
return;
}
diff --git a/media/cast/receiver/cast_receiver_impl.cc b/media/cast/receiver/cast_receiver_impl.cc
index 36669b9e62..8265211c71 100644
--- a/media/cast/receiver/cast_receiver_impl.cc
+++ b/media/cast/receiver/cast_receiver_impl.cc
@@ -31,7 +31,9 @@ CastReceiverImpl::CastReceiverImpl(
const FrameReceiverConfig& video_config,
PacketSender* const packet_sender)
: cast_environment_(cast_environment),
- pacer_(cast_environment->Clock(),
+ pacer_(kTargetBurstSize,
+ kMaxBurstSize,
+ cast_environment->Clock(),
cast_environment->Logging(),
packet_sender,
cast_environment->GetTaskRunner(CastEnvironment::MAIN)),
@@ -211,7 +213,7 @@ void CastReceiverImpl::EmitDecodedVideoFrame(
const scoped_refptr<VideoFrame>& video_frame,
bool is_continuous) {
DCHECK(cast_environment->CurrentlyOn(CastEnvironment::MAIN));
- if (video_frame) {
+ if (video_frame.get()) {
const base::TimeTicks now = cast_environment->Clock()->NowTicks();
cast_environment->Logging()->InsertFrameEvent(
now, FRAME_DECODED, VIDEO_EVENT, rtp_timestamp, frame_id);
diff --git a/media/cast/receiver/frame_receiver.cc b/media/cast/receiver/frame_receiver.cc
index 0e794cdb27..095e611553 100644
--- a/media/cast/receiver/frame_receiver.cc
+++ b/media/cast/receiver/frame_receiver.cc
@@ -216,9 +216,10 @@ void FrameReceiver::EmitAvailableEncodedFrames() {
// skipping one or more frames. Skip if the missing frame wouldn't complete
// playing before the start of playback of the available frame.
if (!is_consecutively_next_frame) {
- // TODO(miu): Also account for expected decode time here?
+ // This assumes that decoding takes as long as playing, which might
+ // not be true.
const base::TimeTicks earliest_possible_end_time_of_missing_frame =
- now + expected_frame_duration_;
+ now + expected_frame_duration_ * 2;
if (earliest_possible_end_time_of_missing_frame < playout_time) {
VLOG(1) << "Wait for next consecutive frame instead of skipping.";
if (!is_waiting_for_consecutive_frame_) {
@@ -234,6 +235,11 @@ void FrameReceiver::EmitAvailableEncodedFrames() {
}
}
+ // At this point, we have the complete next frame, or a decodable
+ // frame from somewhere later in the stream, AND we have given up
+ // on waiting for any frames in between, so now we can ACK the frame.
+ framer_.AckFrame(encoded_frame->frame_id);
+
// Decrypt the payload data in the frame, if crypto is being used.
if (decryptor_.is_activated()) {
std::string decrypted_data;
@@ -256,7 +262,9 @@ void FrameReceiver::EmitAvailableEncodedFrames() {
}
cast_environment_->PostTask(CastEnvironment::MAIN,
FROM_HERE,
- base::Bind(frame_request_queue_.front(),
+ base::Bind(&FrameReceiver::EmitOneFrame,
+ weak_factory_.GetWeakPtr(),
+ frame_request_queue_.front(),
base::Passed(&encoded_frame)));
frame_request_queue_.pop_front();
}
@@ -269,6 +277,13 @@ void FrameReceiver::EmitAvailableEncodedFramesAfterWaiting() {
EmitAvailableEncodedFrames();
}
+void FrameReceiver::EmitOneFrame(const ReceiveEncodedFrameCallback& callback,
+ scoped_ptr<EncodedFrame> encoded_frame) const {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (!callback.is_null())
+ callback.Run(encoded_frame.Pass());
+}
+
base::TimeTicks FrameReceiver::GetPlayoutTime(const EncodedFrame& frame) const {
base::TimeDelta target_playout_delay = target_playout_delay_;
if (frame.new_playout_delay_ms) {
diff --git a/media/cast/receiver/frame_receiver.h b/media/cast/receiver/frame_receiver.h
index 695c8d0a9b..67f5417145 100644
--- a/media/cast/receiver/frame_receiver.h
+++ b/media/cast/receiver/frame_receiver.h
@@ -89,6 +89,13 @@ class FrameReceiver : public RtpPayloadFeedback,
// EmitAvailableEncodedFrames().
void EmitAvailableEncodedFramesAfterWaiting();
+ // Helper that runs |callback|, passing ownership of |encoded_frame| to it.
+ // This method is used by EmitAvailableEncodedFrames() to return to the event
+ // loop, while making sure that FrameReceiver is still alive before the
+ // callback is run.
+ void EmitOneFrame(const ReceiveEncodedFrameCallback& callback,
+ scoped_ptr<EncodedFrame> encoded_frame) const;
+
// Computes the playout time for a frame with the given |rtp_timestamp|.
// Because lip-sync info is refreshed regularly, calling this method with the
// same argument may return different results.
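
The new EmitOneFrame() indirection exists so the task posted back to the main thread is bound through a weak pointer: if the FrameReceiver is destroyed before the task runs, the client callback is simply never invoked. Outside of Chromium's base::WeakPtr/base::Bind machinery, the same guard can be sketched with std::weak_ptr (hypothetical names, illustration only):

    #include <functional>
    #include <iostream>
    #include <memory>
    #include <string>

    struct Receiver {
      void EmitOneFrame(const std::function<void(std::string)>& callback,
                        std::string frame) const {
        if (callback)
          callback(std::move(frame));
      }
    };

    // Wrap the call so that, by the time the "task" actually runs, a destroyed
    // receiver turns the call into a no-op instead of a use-after-free.
    std::function<void()> BindWeak(std::weak_ptr<Receiver> weak,
                                   std::function<void(std::string)> callback,
                                   std::string frame) {
      return [weak, callback, frame]() {
        if (auto self = weak.lock())
          self->EmitOneFrame(callback, frame);
      };
    }

    int main() {
      auto receiver = std::make_shared<Receiver>();
      auto task = BindWeak(receiver,
                           [](std::string f) { std::cout << f << "\n"; },
                           "frame-1");
      receiver.reset();  // Receiver destroyed before the task runs.
      task();            // Safe: lock() fails, nothing happens.
    }
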
diff --git a/media/cast/receiver/video_decoder.cc b/media/cast/receiver/video_decoder.cc
index f4de9c3b31..2c7a9fddb8 100644
--- a/media/cast/receiver/video_decoder.cc
+++ b/media/cast/receiver/video_decoder.cc
@@ -150,15 +150,15 @@ class VideoDecoder::Vp8Impl : public VideoDecoder::ImplBase {
CopyYPlane(image->planes[VPX_PLANE_Y],
image->stride[VPX_PLANE_Y],
image->d_h,
- decoded_frame);
+ decoded_frame.get());
CopyUPlane(image->planes[VPX_PLANE_U],
image->stride[VPX_PLANE_U],
(image->d_h + 1) / 2,
- decoded_frame);
+ decoded_frame.get());
CopyVPlane(image->planes[VPX_PLANE_V],
image->stride[VPX_PLANE_V],
(image->d_h + 1) / 2,
- decoded_frame);
+ decoded_frame.get());
return decoded_frame;
}
@@ -238,7 +238,7 @@ VideoDecoder::VideoDecoder(
VideoDecoder::~VideoDecoder() {}
CastInitializationStatus VideoDecoder::InitializationResult() const {
- if (impl_)
+ if (impl_.get())
return impl_->InitializationResult();
return STATUS_UNSUPPORTED_VIDEO_CODEC;
}
@@ -248,7 +248,8 @@ void VideoDecoder::DecodeFrame(
const DecodeFrameCallback& callback) {
DCHECK(encoded_frame.get());
DCHECK(!callback.is_null());
- if (!impl_ || impl_->InitializationResult() != STATUS_VIDEO_INITIALIZED) {
+ if (!impl_.get() ||
+ impl_->InitializationResult() != STATUS_VIDEO_INITIALIZED) {
callback.Run(make_scoped_refptr<VideoFrame>(NULL), false);
return;
}
diff --git a/media/cast/receiver/video_decoder_unittest.cc b/media/cast/receiver/video_decoder_unittest.cc
index 7ea4b5da14..95d92b8064 100644
--- a/media/cast/receiver/video_decoder_unittest.cc
+++ b/media/cast/receiver/video_decoder_unittest.cc
@@ -12,6 +12,7 @@
#include "media/cast/cast_config.h"
#include "media/cast/receiver/video_decoder.h"
#include "media/cast/sender/vp8_encoder.h"
+#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "media/cast/test/utility/video_utility.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -26,7 +27,7 @@ const int kHeight = 240;
const int kFrameRate = 10;
VideoSenderConfig GetVideoSenderConfigForTest() {
- VideoSenderConfig config;
+ VideoSenderConfig config = GetDefaultVideoSenderConfig();
config.width = kWidth;
config.height = kHeight;
config.max_frame_rate = kFrameRate;
@@ -75,7 +76,7 @@ class VideoDecoderTest : public ::testing::TestWithParam<Codec> {
frame_size,
next_frame_timestamp_);
next_frame_timestamp_ += base::TimeDelta::FromSeconds(1) / kFrameRate;
- PopulateVideoFrame(video_frame, 0);
+ PopulateVideoFrame(video_frame.get(), 0);
// Encode |frame| into |encoded_frame->data|.
scoped_ptr<EncodedFrame> encoded_frame(
@@ -83,7 +84,12 @@ class VideoDecoderTest : public ::testing::TestWithParam<Codec> {
// Test only supports VP8, currently.
CHECK_EQ(CODEC_VIDEO_VP8, GetParam());
vp8_encoder_.Encode(video_frame, encoded_frame.get());
+ // Rewrite frame IDs for testing purposes.
encoded_frame->frame_id = last_frame_id_ + 1 + num_dropped_frames;
+ if (last_frame_id_ == 0)
+ encoded_frame->referenced_frame_id = encoded_frame->frame_id;
+ else
+ encoded_frame->referenced_frame_id = encoded_frame->frame_id - 1;
last_frame_id_ = encoded_frame->frame_id;
{
@@ -121,7 +127,7 @@ class VideoDecoderTest : public ::testing::TestWithParam<Codec> {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
// A NULL |video_frame| indicates a decode error, which we don't expect.
- ASSERT_FALSE(!video_frame);
+ ASSERT_FALSE(!video_frame.get());
// Did the decoder detect whether frames were dropped?
EXPECT_EQ(should_be_continuous, is_continuous);
diff --git a/media/cast/sender/audio_encoder.cc b/media/cast/sender/audio_encoder.cc
index 83bd594fb6..f0c5f8555e 100644
--- a/media/cast/sender/audio_encoder.cc
+++ b/media/cast/sender/audio_encoder.cc
@@ -56,7 +56,8 @@ class AudioEncoder::ImplBase
cast_initialization_status_(STATUS_AUDIO_UNINITIALIZED),
buffer_fill_end_(0),
frame_id_(0),
- frame_rtp_timestamp_(0) {
+ frame_rtp_timestamp_(0),
+ samples_dropped_from_buffer_(0) {
// Support for max sampling rate of 48KHz, 2 channels, 100 ms duration.
const int kMaxSamplesTimesChannelsPerFrame = 48 * 2 * 100;
if (num_channels_ <= 0 || samples_per_frame_ <= 0 ||
@@ -70,6 +71,10 @@ class AudioEncoder::ImplBase
return cast_initialization_status_;
}
+ int samples_per_frame() const {
+ return samples_per_frame_;
+ }
+
void EncodeAudio(scoped_ptr<AudioBus> audio_bus,
const base::TimeTicks& recorded_time) {
DCHECK_EQ(cast_initialization_status_, STATUS_AUDIO_INITIALIZED);
@@ -90,6 +95,7 @@ class AudioEncoder::ImplBase
recorded_time - (frame_capture_time_ + buffer_fill_duration);
if (amount_ahead_by >
base::TimeDelta::FromMilliseconds(kUnderrunThresholdMillis)) {
+ samples_dropped_from_buffer_ += buffer_fill_end_;
buffer_fill_end_ = 0;
buffer_fill_duration = base::TimeDelta();
const int64 num_frames_missed = amount_ahead_by /
@@ -129,7 +135,10 @@ class AudioEncoder::ImplBase
cast_environment_->PostTask(
CastEnvironment::MAIN,
FROM_HERE,
- base::Bind(callback_, base::Passed(&audio_frame)));
+ base::Bind(callback_,
+ base::Passed(&audio_frame),
+ samples_dropped_from_buffer_));
+ samples_dropped_from_buffer_ = 0;
}
// Reset the internal buffer, frame ID, and timestamps for the next frame.
@@ -182,6 +191,10 @@ class AudioEncoder::ImplBase
// the RTP timestamps.
base::TimeTicks frame_capture_time_;
+ // Set to non-zero to indicate the next output frame skipped over audio
+ // samples in order to recover from an input underrun.
+ int samples_dropped_from_buffer_;
+
DISALLOW_COPY_AND_ASSIGN(ImplBase);
};
@@ -359,17 +372,26 @@ AudioEncoder::~AudioEncoder() {}
CastInitializationStatus AudioEncoder::InitializationResult() const {
DCHECK(insert_thread_checker_.CalledOnValidThread());
- if (impl_) {
+ if (impl_.get()) {
return impl_->InitializationResult();
}
return STATUS_UNSUPPORTED_AUDIO_CODEC;
}
+int AudioEncoder::GetSamplesPerFrame() const {
+ DCHECK(insert_thread_checker_.CalledOnValidThread());
+ if (InitializationResult() != STATUS_AUDIO_INITIALIZED) {
+ NOTREACHED();
+ return std::numeric_limits<int>::max();
+ }
+ return impl_->samples_per_frame();
+}
+
void AudioEncoder::InsertAudio(scoped_ptr<AudioBus> audio_bus,
const base::TimeTicks& recorded_time) {
DCHECK(insert_thread_checker_.CalledOnValidThread());
DCHECK(audio_bus.get());
- if (!impl_) {
+ if (InitializationResult() != STATUS_AUDIO_INITIALIZED) {
NOTREACHED();
return;
}
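
The encoder-side change is bookkeeping: when the capture time runs ahead of the buffered audio (an underrun), the partially filled buffer is discarded and the number of discarded samples is remembered, then reported alongside the next encoded frame so the sender can keep its sample accounting balanced. A stripped-down sketch of that mechanism (invented names, no real codec) might be:

    #include <functional>

    class FrameAccumulator {
     public:
      explicit FrameAccumulator(int samples_per_frame)
          : samples_per_frame_(samples_per_frame) {}

      // Called when capture time has run ahead of the buffered audio: whatever
      // was buffered is discarded and remembered as "dropped".
      void HandleUnderrun() {
        samples_dropped_ += buffered_samples_;
        buffered_samples_ = 0;
      }

      // Feed samples; fires |on_frame| with the dropped-sample count each time
      // a whole frame's worth has accumulated, then resets that count.
      void AddSamples(int count, const std::function<void(int)>& on_frame) {
        buffered_samples_ += count;
        while (buffered_samples_ >= samples_per_frame_) {
          buffered_samples_ -= samples_per_frame_;
          on_frame(samples_dropped_);
          samples_dropped_ = 0;
        }
      }

     private:
      const int samples_per_frame_;
      int buffered_samples_ = 0;
      int samples_dropped_ = 0;
    };
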
diff --git a/media/cast/sender/audio_encoder.h b/media/cast/sender/audio_encoder.h
index 5f080c6cb6..e0a3d8a5ba 100644
--- a/media/cast/sender/audio_encoder.h
+++ b/media/cast/sender/audio_encoder.h
@@ -20,7 +20,9 @@ namespace cast {
class AudioEncoder {
public:
- typedef base::Callback<void(scoped_ptr<EncodedFrame>)>
+ // Callback to deliver each EncodedFrame, plus the number of audio samples
+ // skipped since the last frame.
+ typedef base::Callback<void(scoped_ptr<EncodedFrame>, int)>
FrameEncodedCallback;
AudioEncoder(const scoped_refptr<CastEnvironment>& cast_environment,
@@ -33,6 +35,8 @@ class AudioEncoder {
CastInitializationStatus InitializationResult() const;
+ int GetSamplesPerFrame() const;
+
void InsertAudio(scoped_ptr<AudioBus> audio_bus,
const base::TimeTicks& recorded_time);
diff --git a/media/cast/sender/audio_encoder_unittest.cc b/media/cast/sender/audio_encoder_unittest.cc
index 0764148a4c..a33ed3bc0e 100644
--- a/media/cast/sender/audio_encoder_unittest.cc
+++ b/media/cast/sender/audio_encoder_unittest.cc
@@ -39,7 +39,8 @@ class TestEncodedAudioFrameReceiver {
upper_bound_ = upper_bound;
}
- void FrameEncoded(scoped_ptr<EncodedFrame> encoded_frame) {
+ void FrameEncoded(scoped_ptr<EncodedFrame> encoded_frame,
+ int samples_skipped) {
EXPECT_EQ(encoded_frame->dependency, EncodedFrame::KEY);
EXPECT_EQ(static_cast<uint8>(frames_received_ & 0xff),
encoded_frame->frame_id);
diff --git a/media/cast/sender/audio_sender.cc b/media/cast/sender/audio_sender.cc
index 4bf93b30cd..23fd6d1072 100644
--- a/media/cast/sender/audio_sender.cc
+++ b/media/cast/sender/audio_sender.cc
@@ -15,9 +15,6 @@ namespace media {
namespace cast {
namespace {
-const int kNumAggressiveReportsSentAtStart = 100;
-const int kMinSchedulingDelayMs = 1;
-
// TODO(miu): This should be specified in AudioSenderConfig, but currently it is
// fixed to 100 FPS (i.e., 10 ms per frame), and AudioEncoder assumes this as
// well.
@@ -30,19 +27,18 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
CastTransportSender* const transport_sender)
: FrameSender(
cast_environment,
+ true,
transport_sender,
base::TimeDelta::FromMilliseconds(audio_config.rtcp_interval),
audio_config.frequency,
audio_config.ssrc,
- kAudioFrameRate * 2.0, // We lie to increase max outstanding frames.
- audio_config.target_playout_delay),
- configured_encoder_bitrate_(audio_config.bitrate),
- num_aggressive_rtcp_reports_sent_(0),
- last_sent_frame_id_(0),
- latest_acked_frame_id_(0),
- duplicate_ack_counter_(0),
- cast_initialization_status_(STATUS_AUDIO_UNINITIALIZED),
+ kAudioFrameRate,
+ audio_config.min_playout_delay,
+ audio_config.max_playout_delay,
+ NewFixedCongestionControl(audio_config.bitrate)),
+ samples_in_encoder_(0),
weak_factory_(this) {
+ cast_initialization_status_ = STATUS_AUDIO_UNINITIALIZED;
VLOG(1) << "max_unacked_frames " << max_unacked_frames_;
DCHECK_GT(max_unacked_frames_, 0);
@@ -53,8 +49,9 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
audio_config.frequency,
audio_config.bitrate,
audio_config.codec,
- base::Bind(&AudioSender::SendEncodedAudioFrame,
- weak_factory_.GetWeakPtr())));
+ base::Bind(&AudioSender::OnEncodedAudioFrame,
+ weak_factory_.GetWeakPtr(),
+ audio_config.bitrate)));
cast_initialization_status_ = audio_encoder_->InitializationResult();
} else {
NOTREACHED(); // No support for external audio encoding.
@@ -65,9 +62,6 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
transport_config.ssrc = audio_config.ssrc;
transport_config.feedback_ssrc = audio_config.incoming_feedback_ssrc;
transport_config.rtp_payload_type = audio_config.rtp_payload_type;
- // TODO(miu): AudioSender needs to be like VideoSender in providing an upper
- // limit on the number of in-flight frames.
- transport_config.stored_frames = max_unacked_frames_;
transport_config.aes_key = audio_config.aes_key;
transport_config.aes_iv_mask = audio_config.aes_iv_mask;
@@ -75,8 +69,8 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
transport_config,
base::Bind(&AudioSender::OnReceivedCastFeedback,
weak_factory_.GetWeakPtr()),
- base::Bind(&AudioSender::OnReceivedRtt, weak_factory_.GetWeakPtr()));
- memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
+ base::Bind(&AudioSender::OnMeasuredRoundTripTime,
+ weak_factory_.GetWeakPtr()));
}
AudioSender::~AudioSender() {}
@@ -90,180 +84,41 @@ void AudioSender::InsertAudio(scoped_ptr<AudioBus> audio_bus,
}
DCHECK(audio_encoder_.get()) << "Invalid internal state";
- if (AreTooManyFramesInFlight()) {
- VLOG(1) << "Dropping frame due to too many frames currently in-flight.";
+ const base::TimeDelta next_frame_duration =
+ RtpDeltaToTimeDelta(audio_bus->frames(), rtp_timebase());
+ if (ShouldDropNextFrame(next_frame_duration))
return;
- }
+
+ samples_in_encoder_ += audio_bus->frames();
audio_encoder_->InsertAudio(audio_bus.Pass(), recorded_time);
}
-void AudioSender::SendEncodedAudioFrame(
- scoped_ptr<EncodedFrame> encoded_frame) {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-
- const uint32 frame_id = encoded_frame->frame_id;
-
- const bool is_first_frame_to_be_sent = last_send_time_.is_null();
- last_send_time_ = cast_environment_->Clock()->NowTicks();
- last_sent_frame_id_ = frame_id;
- // If this is the first frame about to be sent, fake the value of
- // |latest_acked_frame_id_| to indicate the receiver starts out all caught up.
- // Also, schedule the periodic frame re-send checks.
- if (is_first_frame_to_be_sent) {
- latest_acked_frame_id_ = frame_id - 1;
- ScheduleNextResendCheck();
- }
-
- cast_environment_->Logging()->InsertEncodedFrameEvent(
- last_send_time_, FRAME_ENCODED, AUDIO_EVENT, encoded_frame->rtp_timestamp,
- frame_id, static_cast<int>(encoded_frame->data.size()),
- encoded_frame->dependency == EncodedFrame::KEY,
- configured_encoder_bitrate_);
- // Only use lowest 8 bits as key.
- frame_id_to_rtp_timestamp_[frame_id & 0xff] = encoded_frame->rtp_timestamp;
-
- DCHECK(!encoded_frame->reference_time.is_null());
- rtp_timestamp_helper_.StoreLatestTime(encoded_frame->reference_time,
- encoded_frame->rtp_timestamp);
-
- // At the start of the session, it's important to send reports before each
- // frame so that the receiver can properly compute playout times. The reason
- // more than one report is sent is because transmission is not guaranteed,
- // only best effort, so we send enough that one should almost certainly get
- // through.
- if (num_aggressive_rtcp_reports_sent_ < kNumAggressiveReportsSentAtStart) {
- // SendRtcpReport() will schedule future reports to be made if this is the
- // last "aggressive report."
- ++num_aggressive_rtcp_reports_sent_;
- const bool is_last_aggressive_report =
- (num_aggressive_rtcp_reports_sent_ == kNumAggressiveReportsSentAtStart);
- VLOG_IF(1, is_last_aggressive_report) << "Sending last aggressive report.";
- SendRtcpReport(is_last_aggressive_report);
- }
-
- if (send_target_playout_delay_) {
- encoded_frame->new_playout_delay_ms =
- target_playout_delay_.InMilliseconds();
- }
- transport_sender_->InsertCodedAudioFrame(*encoded_frame);
+int AudioSender::GetNumberOfFramesInEncoder() const {
+ // Note: It's possible for a partial frame to be in the encoder, but returning
+ // the floor() is good enough for the "design limit" check in FrameSender.
+ return samples_in_encoder_ / audio_encoder_->GetSamplesPerFrame();
}
-void AudioSender::ScheduleNextResendCheck() {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- DCHECK(!last_send_time_.is_null());
- base::TimeDelta time_to_next =
- last_send_time_ - cast_environment_->Clock()->NowTicks() +
- target_playout_delay_;
- time_to_next = std::max(
- time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
- cast_environment_->PostDelayedTask(
- CastEnvironment::MAIN,
- FROM_HERE,
- base::Bind(&AudioSender::ResendCheck, weak_factory_.GetWeakPtr()),
- time_to_next);
+base::TimeDelta AudioSender::GetInFlightMediaDuration() const {
+ const int samples_in_flight = samples_in_encoder_ +
+ GetUnacknowledgedFrameCount() * audio_encoder_->GetSamplesPerFrame();
+ return RtpDeltaToTimeDelta(samples_in_flight, rtp_timebase());
}
-void AudioSender::ResendCheck() {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- DCHECK(!last_send_time_.is_null());
- const base::TimeDelta time_since_last_send =
- cast_environment_->Clock()->NowTicks() - last_send_time_;
- if (time_since_last_send > target_playout_delay_) {
- if (latest_acked_frame_id_ == last_sent_frame_id_) {
- // Last frame acked, no point in doing anything
- } else {
- VLOG(1) << "ACK timeout; last acked frame: " << latest_acked_frame_id_;
- ResendForKickstart();
- }
- }
- ScheduleNextResendCheck();
+void AudioSender::OnAck(uint32 frame_id) {
}
-void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
+void AudioSender::OnEncodedAudioFrame(
+ int encoder_bitrate,
+ scoped_ptr<EncodedFrame> encoded_frame,
+ int samples_skipped) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- if (is_rtt_available()) {
- // Having the RTT values implies the receiver sent back a receiver report
- // based on it having received a report from here. Therefore, ensure this
- // sender stops aggressively sending reports.
- if (num_aggressive_rtcp_reports_sent_ < kNumAggressiveReportsSentAtStart) {
- VLOG(1) << "No longer a need to send reports aggressively (sent "
- << num_aggressive_rtcp_reports_sent_ << ").";
- num_aggressive_rtcp_reports_sent_ = kNumAggressiveReportsSentAtStart;
- ScheduleNextRtcpReport();
- }
- }
-
- if (last_send_time_.is_null())
- return; // Cannot get an ACK without having first sent a frame.
-
- if (cast_feedback.missing_frames_and_packets.empty()) {
- // We only count duplicate ACKs when we have sent newer frames.
- if (latest_acked_frame_id_ == cast_feedback.ack_frame_id &&
- latest_acked_frame_id_ != last_sent_frame_id_) {
- duplicate_ack_counter_++;
- } else {
- duplicate_ack_counter_ = 0;
- }
- // TODO(miu): The values "2" and "3" should be derived from configuration.
- if (duplicate_ack_counter_ >= 2 && duplicate_ack_counter_ % 3 == 2) {
- VLOG(1) << "Received duplicate ACK for frame " << latest_acked_frame_id_;
- ResendForKickstart();
- }
- } else {
- // Only count duplicated ACKs if there is no NACK request in between.
- // This is to avoid aggresive resend.
- duplicate_ack_counter_ = 0;
- }
-
- const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+ samples_in_encoder_ -= audio_encoder_->GetSamplesPerFrame() + samples_skipped;
+ DCHECK_GE(samples_in_encoder_, 0);
- const RtpTimestamp rtp_timestamp =
- frame_id_to_rtp_timestamp_[cast_feedback.ack_frame_id & 0xff];
- cast_environment_->Logging()->InsertFrameEvent(now,
- FRAME_ACK_RECEIVED,
- AUDIO_EVENT,
- rtp_timestamp,
- cast_feedback.ack_frame_id);
-
- const bool is_acked_out_of_order =
- static_cast<int32>(cast_feedback.ack_frame_id -
- latest_acked_frame_id_) < 0;
- VLOG(2) << "Received ACK" << (is_acked_out_of_order ? " out-of-order" : "")
- << " for frame " << cast_feedback.ack_frame_id;
- if (!is_acked_out_of_order) {
- // Cancel resends of acked frames.
- std::vector<uint32> cancel_sending_frames;
- while (latest_acked_frame_id_ != cast_feedback.ack_frame_id) {
- latest_acked_frame_id_++;
- cancel_sending_frames.push_back(latest_acked_frame_id_);
- }
- transport_sender_->CancelSendingFrames(ssrc_, cancel_sending_frames);
- latest_acked_frame_id_ = cast_feedback.ack_frame_id;
- }
-}
-
-bool AudioSender::AreTooManyFramesInFlight() const {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- int frames_in_flight = 0;
- if (!last_send_time_.is_null()) {
- frames_in_flight +=
- static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
- }
- VLOG(2) << frames_in_flight
- << " frames in flight; last sent: " << last_sent_frame_id_
- << " latest acked: " << latest_acked_frame_id_;
- return frames_in_flight >= max_unacked_frames_;
-}
-
-void AudioSender::ResendForKickstart() {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- DCHECK(!last_send_time_.is_null());
- VLOG(1) << "Resending last packet of frame " << last_sent_frame_id_
- << " to kick-start.";
- last_send_time_ = cast_environment_->Clock()->NowTicks();
- transport_sender_->ResendFrameForKickstart(ssrc_, last_sent_frame_id_);
+ SendEncodedFrame(encoder_bitrate, encoded_frame.Pass());
}
} // namespace cast
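
After this rewrite the audio sender's only per-frame state is a sample counter: samples enter with InsertAudio(), leave (plus any skipped samples) with each encoded frame, and the in-flight media duration is derived from that counter plus the number of unacked frames. A compact sketch of the arithmetic, assuming a 48 kHz timebase and 10 ms frames purely for illustration:

    #include <chrono>
    #include <cstdint>

    constexpr int kRtpTimebase = 48000;    // Samples per second (example value).
    constexpr int kSamplesPerFrame = 480;  // 10 ms frames at 48 kHz.

    std::chrono::microseconds SamplesToDuration(int64_t samples) {
      return std::chrono::microseconds(samples * 1000000 / kRtpTimebase);
    }

    struct SenderAccounting {
      int samples_in_encoder = 0;
      int unacked_frames = 0;

      void OnInsertAudio(int samples) { samples_in_encoder += samples; }

      void OnEncodedFrame(int samples_skipped) {
        // One frame left the encoder, plus anything it dropped on underrun.
        samples_in_encoder -= kSamplesPerFrame + samples_skipped;
        ++unacked_frames;
      }

      void OnAck() { --unacked_frames; }

      std::chrono::microseconds InFlightMediaDuration() const {
        return SamplesToDuration(samples_in_encoder +
                                 int64_t{unacked_frames} * kSamplesPerFrame);
      }
    };
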
diff --git a/media/cast/sender/audio_sender.h b/media/cast/sender/audio_sender.h
index e07b892a73..791cc8e6fd 100644
--- a/media/cast/sender/audio_sender.h
+++ b/media/cast/sender/audio_sender.h
@@ -51,64 +51,21 @@ class AudioSender : public FrameSender,
const base::TimeTicks& recorded_time);
protected:
- // Protected for testability.
- void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback);
+ virtual int GetNumberOfFramesInEncoder() const OVERRIDE;
+ virtual base::TimeDelta GetInFlightMediaDuration() const OVERRIDE;
+ virtual void OnAck(uint32 frame_id) OVERRIDE;
private:
- // Schedule and execute periodic checks for re-sending packets. If no
- // acknowledgements have been received for "too long," AudioSender will
- // speculatively re-send certain packets of an unacked frame to kick-start
- // re-transmission. This is a last resort tactic to prevent the session from
- // getting stuck after a long outage.
- void ScheduleNextResendCheck();
- void ResendCheck();
- void ResendForKickstart();
-
- // Returns true if there are too many frames in flight, as defined by the
- // configured target playout delay plus simple logic. When this is true,
- // InsertAudio() will silenty drop frames instead of sending them to the audio
- // encoder.
- bool AreTooManyFramesInFlight() const;
-
// Called by the |audio_encoder_| with the next EncodedFrame to send.
- void SendEncodedAudioFrame(scoped_ptr<EncodedFrame> audio_frame);
+ void OnEncodedAudioFrame(int encoder_bitrate,
+ scoped_ptr<EncodedFrame> encoded_frame,
+ int samples_skipped);
// Encodes AudioBuses into EncodedFrames.
scoped_ptr<AudioEncoder> audio_encoder_;
- const int configured_encoder_bitrate_;
-
- // Counts how many RTCP reports are being "aggressively" sent (i.e., one per
- // frame) at the start of the session. Once a threshold is reached, RTCP
- // reports are instead sent at the configured interval + random drift.
- int num_aggressive_rtcp_reports_sent_;
-
- // This is "null" until the first frame is sent. Thereafter, this tracks the
- // last time any frame was sent or re-sent.
- base::TimeTicks last_send_time_;
-
- // The ID of the last frame sent. Logic throughout AudioSender assumes this
- // can safely wrap-around. This member is invalid until
- // |!last_send_time_.is_null()|.
- uint32 last_sent_frame_id_;
-
- // The ID of the latest (not necessarily the last) frame that has been
- // acknowledged. Logic throughout AudioSender assumes this can safely
- // wrap-around. This member is invalid until |!last_send_time_.is_null()|.
- uint32 latest_acked_frame_id_;
-
- // Counts the number of duplicate ACK that are being received. When this
- // number reaches a threshold, the sender will take this as a sign that the
- // receiver hasn't yet received the first packet of the next frame. In this
- // case, AudioSender will trigger a re-send of the next frame.
- int duplicate_ack_counter_;
-
- // If this sender is ready for use, this is STATUS_AUDIO_INITIALIZED.
- CastInitializationStatus cast_initialization_status_;
-
- // This is a "good enough" mapping for finding the RTP timestamp associated
- // with a video frame. The key is the lowest 8 bits of frame id (which is
- // what is sent via RTCP). This map is used for logging purposes.
- RtpTimestamp frame_id_to_rtp_timestamp_[256];
+
+ // The number of audio samples enqueued in |audio_encoder_|.
+ int samples_in_encoder_;
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<AudioSender> weak_factory_;
diff --git a/media/cast/sender/audio_sender_unittest.cc b/media/cast/sender/audio_sender_unittest.cc
index 6c99af9d3c..b651c83ba6 100644
--- a/media/cast/sender/audio_sender_unittest.cc
+++ b/media/cast/sender/audio_sender_unittest.cc
@@ -81,6 +81,7 @@ class AudioSenderTest : public ::testing::Test {
NULL,
testing_clock_,
dummy_endpoint,
+ make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
BulkRawEventsCallback(),
base::TimeDelta(),
diff --git a/media/cast/sender/congestion_control.cc b/media/cast/sender/congestion_control.cc
index 9efe50adbf..30e3be7d6e 100644
--- a/media/cast/sender/congestion_control.cc
+++ b/media/cast/sender/congestion_control.cc
@@ -22,6 +22,117 @@
namespace media {
namespace cast {
+class AdaptiveCongestionControl : public CongestionControl {
+ public:
+ AdaptiveCongestionControl(base::TickClock* clock,
+ uint32 max_bitrate_configured,
+ uint32 min_bitrate_configured,
+ size_t max_unacked_frames);
+
+ virtual ~AdaptiveCongestionControl() OVERRIDE;
+
+ virtual void UpdateRtt(base::TimeDelta rtt) OVERRIDE;
+
+ // Called when an encoded frame is sent to the transport.
+ virtual void SendFrameToTransport(uint32 frame_id,
+ size_t frame_size,
+ base::TimeTicks when) OVERRIDE;
+
+ // Called when we receive an ACK for a frame.
+ virtual void AckFrame(uint32 frame_id, base::TimeTicks when) OVERRIDE;
+
+ // Returns the bitrate we should use for the next frame.
+ virtual uint32 GetBitrate(base::TimeTicks playout_time,
+ base::TimeDelta playout_delay) OVERRIDE;
+
+ private:
+ struct FrameStats {
+ FrameStats();
+ // Time this frame was sent to the transport.
+ base::TimeTicks sent_time;
+ // Time this frame was acked.
+ base::TimeTicks ack_time;
+ // Size of encoded frame in bits.
+ size_t frame_size;
+ };
+
+ // Calculate how much "dead air" (idle time) there is between two frames.
+ static base::TimeDelta DeadTime(const FrameStats& a, const FrameStats& b);
+ // Get the FrameStats for a given |frame_id|.
+ // Note: Older FrameStats will be removed automatically.
+ FrameStats* GetFrameStats(uint32 frame_id);
+ // Calculate a safe bitrate. This is based on how much we've been
+ // sending in the past.
+ double CalculateSafeBitrate();
+
+ // For a given frame, calculate when it might be acked.
+ // (Or return the time it was acked, if it was.)
+ base::TimeTicks EstimatedAckTime(uint32 frame_id, double bitrate);
+ // Calculate when we start sending the data for a given frame.
+ // This is done by calculating when we were done sending the previous
+ // frame, but obviously can't be less than |sent_time| (if known).
+ base::TimeTicks EstimatedSendingTime(uint32 frame_id, double bitrate);
+
+ base::TickClock* const clock_; // Not owned by this class.
+ const uint32 max_bitrate_configured_;
+ const uint32 min_bitrate_configured_;
+ std::deque<FrameStats> frame_stats_;
+ uint32 last_frame_stats_;
+ uint32 last_acked_frame_;
+ uint32 last_encoded_frame_;
+ base::TimeDelta rtt_;
+ size_t history_size_;
+ size_t acked_bits_in_history_;
+ base::TimeDelta dead_time_in_history_;
+
+ DISALLOW_COPY_AND_ASSIGN(AdaptiveCongestionControl);
+};
+
+class FixedCongestionControl : public CongestionControl {
+ public:
+ explicit FixedCongestionControl(uint32 bitrate) : bitrate_(bitrate) {}
+ virtual ~FixedCongestionControl() OVERRIDE {}
+
+ virtual void UpdateRtt(base::TimeDelta rtt) OVERRIDE {
+ }
+
+ // Called when an encoded frame is sent to the transport.
+ virtual void SendFrameToTransport(uint32 frame_id,
+ size_t frame_size,
+ base::TimeTicks when) OVERRIDE {
+ }
+
+ // Called when we receive an ACK for a frame.
+ virtual void AckFrame(uint32 frame_id, base::TimeTicks when) OVERRIDE {
+ }
+
+ // Returns the bitrate we should use for the next frame.
+ virtual uint32 GetBitrate(base::TimeTicks playout_time,
+ base::TimeDelta playout_delay) OVERRIDE {
+ return bitrate_;
+ }
+
+ private:
+ uint32 bitrate_;
+ DISALLOW_COPY_AND_ASSIGN(FixedCongestionControl);
+};
+
+
+CongestionControl* NewAdaptiveCongestionControl(
+ base::TickClock* clock,
+ uint32 max_bitrate_configured,
+ uint32 min_bitrate_configured,
+ size_t max_unacked_frames) {
+ return new AdaptiveCongestionControl(clock,
+ max_bitrate_configured,
+ min_bitrate_configured,
+ max_unacked_frames);
+}
+
+CongestionControl* NewFixedCongestionControl(uint32 bitrate) {
+ return new FixedCongestionControl(bitrate);
+}
+
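For orientation, here is a small standalone sketch (plain std C++, not Chromium's base types or scoped_ptr) of the pattern the two factory functions above expose: an abstract bitrate controller selected per encoder type. The (min + max) / 2 midpoint for hardware encoders mirrors what VideoSender passes to NewFixedCongestionControl later in this change; the class and function names in the sketch are invented for illustration.

// Sketch only: mirrors the interface/factory split, not the real Chromium types.
#include <cstdint>
#include <memory>

class BitrateController {                     // stands in for CongestionControl
 public:
  virtual ~BitrateController() = default;
  virtual uint32_t GetBitrate() = 0;
};

class FixedBitrate : public BitrateController {
 public:
  explicit FixedBitrate(uint32_t bitrate) : bitrate_(bitrate) {}
  uint32_t GetBitrate() override { return bitrate_; }
 private:
  const uint32_t bitrate_;
};

// Hardware encoders misbehave when the bitrate changes often, so they get a
// fixed midpoint rate; a software encoder would get the adaptive controller.
std::unique_ptr<BitrateController> MakeController(bool use_external_encoder,
                                                  uint32_t min_bps,
                                                  uint32_t max_bps) {
  if (use_external_encoder)
    return std::make_unique<FixedBitrate>((min_bps + max_bps) / 2);
  return std::make_unique<FixedBitrate>(max_bps);  // adaptive impl omitted here
}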
// This means that we *try* to keep our buffer 90% empty.
// If it is less full, we increase the bandwidth, if it is more
// we decrease the bandwidth. Making this smaller makes the
@@ -32,13 +143,14 @@ static const double kTargetEmptyBufferFraction = 0.9;
// congestion control adapt slower.
static const size_t kHistorySize = 100;
-CongestionControl::FrameStats::FrameStats() : frame_size(0) {
+AdaptiveCongestionControl::FrameStats::FrameStats() : frame_size(0) {
}
-CongestionControl::CongestionControl(base::TickClock* clock,
- uint32 max_bitrate_configured,
- uint32 min_bitrate_configured,
- size_t max_unacked_frames)
+AdaptiveCongestionControl::AdaptiveCongestionControl(
+ base::TickClock* clock,
+ uint32 max_bitrate_configured,
+ uint32 min_bitrate_configured,
+ size_t max_unacked_frames)
: clock_(clock),
max_bitrate_configured_(max_bitrate_configured),
min_bitrate_configured_(min_bitrate_configured),
@@ -57,14 +169,15 @@ CongestionControl::CongestionControl(base::TickClock* clock,
}
CongestionControl::~CongestionControl() {}
+AdaptiveCongestionControl::~AdaptiveCongestionControl() {}
-void CongestionControl::UpdateRtt(base::TimeDelta rtt) {
+void AdaptiveCongestionControl::UpdateRtt(base::TimeDelta rtt) {
rtt_ = (7 * rtt_ + rtt) / 8;
}
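The rtt_ update above is a 1/8-weight exponentially weighted moving average. A tiny standalone illustration of how it converges (values in milliseconds; illustrative numbers, not Chromium code):

#include <cstdio>

int main() {
  double smoothed_rtt_ms = 100.0;            // previous estimate
  const double samples_ms[] = {40, 40, 40};  // three new measurements
  for (double sample : samples_ms) {
    smoothed_rtt_ms = (7.0 * smoothed_rtt_ms + sample) / 8.0;  // same 7/8 + 1/8 blend
    std::printf("%.1f\n", smoothed_rtt_ms);  // 92.5, 85.9, 80.2 -> drifts toward 40
  }
}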
// Calculate how much "dead air" there is between two frames.
-base::TimeDelta CongestionControl::DeadTime(const FrameStats& a,
- const FrameStats& b) {
+base::TimeDelta AdaptiveCongestionControl::DeadTime(const FrameStats& a,
+ const FrameStats& b) {
if (b.sent_time > a.ack_time) {
return b.sent_time - a.ack_time;
} else {
@@ -72,7 +185,7 @@ base::TimeDelta CongestionControl::DeadTime(const FrameStats& a,
}
}
-double CongestionControl::CalculateSafeBitrate() {
+double AdaptiveCongestionControl::CalculateSafeBitrate() {
double transmit_time =
(GetFrameStats(last_acked_frame_)->ack_time -
frame_stats_.front().sent_time - dead_time_in_history_).InSecondsF();
@@ -83,8 +196,8 @@ double CongestionControl::CalculateSafeBitrate() {
return acked_bits_in_history_ / std::max(transmit_time, 1E-3);
}
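CalculateSafeBitrate() is essentially "bits acked within the history window divided by the time actually spent transmitting them" (dead air subtracted, with a 1 ms floor). A rough standalone illustration of the arithmetic, using made-up numbers:

#include <algorithm>
#include <cstdio>

int main() {
  const double acked_bits_in_history = 4'000'000;  // 4 Mbit acked recently
  const double window_seconds = 2.0;               // wall-clock span of the history
  const double dead_time_seconds = 0.5;            // idle gaps between frames
  const double transmit_time = window_seconds - dead_time_seconds;
  const double safe_bitrate =
      acked_bits_in_history / std::max(transmit_time, 1e-3);  // same 1 ms floor
  std::printf("%.0f bits/s\n", safe_bitrate);      // ~2.67 Mbit/s
}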
-CongestionControl::FrameStats* CongestionControl::GetFrameStats(
- uint32 frame_id) {
+AdaptiveCongestionControl::FrameStats*
+AdaptiveCongestionControl::GetFrameStats(uint32 frame_id) {
int32 offset = static_cast<int32>(frame_id - last_frame_stats_);
DCHECK_LT(offset, static_cast<int32>(kHistorySize));
if (offset > 0) {
@@ -109,7 +222,8 @@ CongestionControl::FrameStats* CongestionControl::GetFrameStats(
return &frame_stats_[offset];
}
-void CongestionControl::AckFrame(uint32 frame_id, base::TimeTicks when) {
+void AdaptiveCongestionControl::AckFrame(uint32 frame_id,
+ base::TimeTicks when) {
FrameStats* frame_stats = GetFrameStats(last_acked_frame_);
while (IsNewerFrameId(frame_id, last_acked_frame_)) {
FrameStats* last_frame_stats = frame_stats;
@@ -129,9 +243,9 @@ void CongestionControl::AckFrame(uint32 frame_id, base::TimeTicks when) {
}
}
-void CongestionControl::SendFrameToTransport(uint32 frame_id,
- size_t frame_size,
- base::TimeTicks when) {
+void AdaptiveCongestionControl::SendFrameToTransport(uint32 frame_id,
+ size_t frame_size,
+ base::TimeTicks when) {
last_encoded_frame_ = frame_id;
FrameStats* frame_stats = GetFrameStats(frame_id);
DCHECK(frame_stats);
@@ -139,8 +253,8 @@ void CongestionControl::SendFrameToTransport(uint32 frame_id,
frame_stats->sent_time = when;
}
-base::TimeTicks CongestionControl::EstimatedAckTime(uint32 frame_id,
- double bitrate) {
+base::TimeTicks AdaptiveCongestionControl::EstimatedAckTime(uint32 frame_id,
+ double bitrate) {
FrameStats* frame_stats = GetFrameStats(frame_id);
DCHECK(frame_stats);
if (frame_stats->ack_time.is_null()) {
@@ -164,8 +278,9 @@ base::TimeTicks CongestionControl::EstimatedAckTime(uint32 frame_id,
}
}
-base::TimeTicks CongestionControl::EstimatedSendingTime(uint32 frame_id,
- double bitrate) {
+base::TimeTicks AdaptiveCongestionControl::EstimatedSendingTime(
+ uint32 frame_id,
+ double bitrate) {
FrameStats* frame_stats = GetFrameStats(frame_id);
DCHECK(frame_stats);
base::TimeTicks ret = EstimatedAckTime(frame_id - 1, bitrate) - rtt_;
@@ -177,8 +292,8 @@ base::TimeTicks CongestionControl::EstimatedSendingTime(uint32 frame_id,
}
}
-uint32 CongestionControl::GetBitrate(base::TimeTicks playout_time,
- base::TimeDelta playout_delay) {
+uint32 AdaptiveCongestionControl::GetBitrate(base::TimeTicks playout_time,
+ base::TimeDelta playout_delay) {
double safe_bitrate = CalculateSafeBitrate();
// Estimate when we might start sending the next frame.
base::TimeDelta time_to_catch_up =
diff --git a/media/cast/sender/congestion_control.h b/media/cast/sender/congestion_control.h
index 8537037c71..5d1256f75a 100644
--- a/media/cast/sender/congestion_control.h
+++ b/media/cast/sender/congestion_control.h
@@ -17,69 +17,30 @@ namespace cast {
class CongestionControl {
public:
- CongestionControl(base::TickClock* clock,
- uint32 max_bitrate_configured,
- uint32 min_bitrate_configured,
- size_t max_unacked_frames);
-
virtual ~CongestionControl();
- void UpdateRtt(base::TimeDelta rtt);
+ // Called with latest measured rtt value.
+ virtual void UpdateRtt(base::TimeDelta rtt) = 0;
// Called when an encoded frame is sent to the transport.
- void SendFrameToTransport(uint32 frame_id,
- size_t frame_size,
- base::TimeTicks when);
-
+ virtual void SendFrameToTransport(uint32 frame_id,
+ size_t frame_size,
+ base::TimeTicks when) = 0;
// Called when we receive an ACK for a frame.
- void AckFrame(uint32 frame_id, base::TimeTicks when);
+ virtual void AckFrame(uint32 frame_id, base::TimeTicks when) = 0;
// Returns the bitrate we should use for the next frame.
- uint32 GetBitrate(base::TimeTicks playout_time,
- base::TimeDelta playout_delay);
-
- private:
- struct FrameStats {
- FrameStats();
- // Time this frame was sent to the transport.
- base::TimeTicks sent_time;
- // Time this frame was acked.
- base::TimeTicks ack_time;
- // Size of encoded frame in bits.
- size_t frame_size;
- };
-
- // Calculate how much "dead air" (idle time) there is between two frames.
- static base::TimeDelta DeadTime(const FrameStats& a, const FrameStats& b);
- // Get the FrameStats for a given |frame_id|.
- // Note: Older FrameStats will be removed automatically.
- FrameStats* GetFrameStats(uint32 frame_id);
- // Calculate a safe bitrate. This is based on how much we've been
- // sending in the past.
- double CalculateSafeBitrate();
-
- // For a given frame, calculate when it might be acked.
- // (Or return the time it was acked, if it was.)
- base::TimeTicks EstimatedAckTime(uint32 frame_id, double bitrate);
- // Calculate when we start sending the data for a given frame.
- // This is done by calculating when we were done sending the previous
- // frame, but obviously can't be less than |sent_time| (if known).
- base::TimeTicks EstimatedSendingTime(uint32 frame_id, double bitrate);
+ virtual uint32 GetBitrate(base::TimeTicks playout_time,
+ base::TimeDelta playout_delay) = 0;
+};
- base::TickClock* const clock_; // Not owned by this class.
- const uint32 max_bitrate_configured_;
- const uint32 min_bitrate_configured_;
- std::deque<FrameStats> frame_stats_;
- uint32 last_frame_stats_;
- uint32 last_acked_frame_;
- uint32 last_encoded_frame_;
- base::TimeDelta rtt_;
- size_t history_size_;
- size_t acked_bits_in_history_;
- base::TimeDelta dead_time_in_history_;
+CongestionControl* NewAdaptiveCongestionControl(
+ base::TickClock* clock,
+ uint32 max_bitrate_configured,
+ uint32 min_bitrate_configured,
+ size_t max_unacked_frames);
- DISALLOW_COPY_AND_ASSIGN(CongestionControl);
-};
+CongestionControl* NewFixedCongestionControl(uint32 bitrate);
} // namespace cast
} // namespace media
diff --git a/media/cast/sender/congestion_control_unittest.cc b/media/cast/sender/congestion_control_unittest.cc
index afdce0031f..ec68c02625 100644
--- a/media/cast/sender/congestion_control_unittest.cc
+++ b/media/cast/sender/congestion_control_unittest.cc
@@ -25,7 +25,7 @@ class CongestionControlTest : public ::testing::Test {
: task_runner_(new test::FakeSingleThreadTaskRunner(&testing_clock_)) {
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kStartMillisecond));
- congestion_control_.reset(new CongestionControl(
+ congestion_control_.reset(NewAdaptiveCongestionControl(
&testing_clock_, kMaxBitrateConfigured, kMinBitrateConfigured, 10));
}
diff --git a/media/cast/sender/external_video_encoder.cc b/media/cast/sender/external_video_encoder.cc
index fbc24d82cb..5aca424d47 100644
--- a/media/cast/sender/external_video_encoder.cc
+++ b/media/cast/sender/external_video_encoder.cc
@@ -84,7 +84,7 @@ class LocalVideoEncodeAcceleratorClient
// Initialize the real HW encoder.
void Initialize(const VideoSenderConfig& video_config) {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
VideoCodecProfile output_profile = media::VIDEO_CODEC_PROFILE_UNKNOWN;
@@ -114,7 +114,11 @@ class LocalVideoEncodeAcceleratorClient
UMA_HISTOGRAM_BOOLEAN("Cast.Sender.VideoEncodeAcceleratorInitializeSuccess",
result);
if (!result) {
- NotifyError(VideoEncodeAccelerator::kInvalidArgumentError);
+ cast_environment_->PostTask(
+ CastEnvironment::MAIN,
+ FROM_HERE,
+ base::Bind(&ExternalVideoEncoder::EncoderInitialized, weak_owner_,
+ false));
return;
}
@@ -124,7 +128,7 @@ class LocalVideoEncodeAcceleratorClient
// Destroy the VEA on the correct thread.
void Destroy() {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
if (!video_encode_accelerator_)
return;
@@ -141,7 +145,7 @@ class LocalVideoEncodeAcceleratorClient
}
void SetBitRate(uint32 bit_rate) {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
video_encode_accelerator_->RequestEncodingParametersChange(bit_rate,
@@ -153,7 +157,7 @@ class LocalVideoEncodeAcceleratorClient
const base::TimeTicks& capture_time,
bool key_frame_requested,
const VideoEncoder::FrameEncodedCallback& frame_encoded_callback) {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
encoded_frame_data_storage_.push_back(
@@ -165,7 +169,7 @@ class LocalVideoEncodeAcceleratorClient
protected:
virtual void NotifyError(VideoEncodeAccelerator::Error error) OVERRIDE {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
VLOG(1) << "ExternalVideoEncoder NotifyError: " << error;
@@ -179,7 +183,7 @@ class LocalVideoEncodeAcceleratorClient
virtual void RequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) OVERRIDE {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
DCHECK(video_encode_accelerator_);
@@ -196,7 +200,7 @@ class LocalVideoEncodeAcceleratorClient
virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
size_t payload_size,
bool key_frame) OVERRIDE {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
if (bitstream_buffer_id < 0 ||
bitstream_buffer_id >= static_cast<int32>(output_buffers_.size())) {
@@ -322,7 +326,7 @@ class LocalVideoEncodeAcceleratorClient
}
void ReceivedSharedMemory(scoped_ptr<base::SharedMemory> memory) {
- DCHECK(encoder_task_runner_);
+ DCHECK(encoder_task_runner_.get());
DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread());
output_buffers_.push_back(memory.release());
@@ -342,7 +346,8 @@ class LocalVideoEncodeAcceleratorClient
cast_environment_->PostTask(
CastEnvironment::MAIN,
FROM_HERE,
- base::Bind(&ExternalVideoEncoder::EncoderInitialized, weak_owner_));
+ base::Bind(&ExternalVideoEncoder::EncoderInitialized, weak_owner_,
+ true));
}
static void DestroyVideoEncodeAcceleratorOnEncoderThread(
@@ -379,12 +384,14 @@ class LocalVideoEncodeAcceleratorClient
ExternalVideoEncoder::ExternalVideoEncoder(
scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config,
+ const CastInitializationCallback& initialization_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb)
: video_config_(video_config),
cast_environment_(cast_environment),
encoder_active_(false),
key_frame_requested_(false),
+ initialization_cb_(initialization_cb),
weak_factory_(this) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
@@ -393,15 +400,20 @@ ExternalVideoEncoder::ExternalVideoEncoder(
create_vea_cb,
create_video_encode_mem_cb,
weak_factory_.GetWeakPtr());
- DCHECK(video_accelerator_client_);
+ DCHECK(video_accelerator_client_.get());
}
ExternalVideoEncoder::~ExternalVideoEncoder() {
}
-void ExternalVideoEncoder::EncoderInitialized() {
+void ExternalVideoEncoder::EncoderInitialized(bool success) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- encoder_active_ = true;
+ encoder_active_ = success;
+ DCHECK(!initialization_cb_.is_null());
+ initialization_cb_.Run(
+ success ?
+ STATUS_VIDEO_INITIALIZED : STATUS_HW_VIDEO_ENCODER_NOT_SUPPORTED);
+ initialization_cb_.Reset();
}
void ExternalVideoEncoder::EncoderError() {
diff --git a/media/cast/sender/external_video_encoder.h b/media/cast/sender/external_video_encoder.h
index 269fb3e7c8..3a5f73b38e 100644
--- a/media/cast/sender/external_video_encoder.h
+++ b/media/cast/sender/external_video_encoder.h
@@ -28,6 +28,7 @@ class ExternalVideoEncoder : public VideoEncoder {
ExternalVideoEncoder(
scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config,
+ const CastInitializationCallback& initialization_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb);
@@ -56,7 +57,9 @@ class ExternalVideoEncoder : public VideoEncoder {
scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner);
protected:
- void EncoderInitialized();
+ // If |success| is true, the encoder was initialized successfully;
+ // otherwise, encoder initialization failed.
+ void EncoderInitialized(bool success);
void EncoderError();
private:
@@ -71,6 +74,8 @@ class ExternalVideoEncoder : public VideoEncoder {
scoped_refptr<LocalVideoEncodeAcceleratorClient> video_accelerator_client_;
scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_;
+ CastInitializationCallback initialization_cb_;
+
// Weak pointer factory for posting back LocalVideoEncodeAcceleratorClient
// notifications to ExternalVideoEncoder.
// NOTE: Weak pointers must be invalidated before all other member variables.
diff --git a/media/cast/sender/external_video_encoder_unittest.cc b/media/cast/sender/external_video_encoder_unittest.cc
index 385b121695..2f6fa9e605 100644
--- a/media/cast/sender/external_video_encoder_unittest.cc
+++ b/media/cast/sender/external_video_encoder_unittest.cc
@@ -23,6 +23,8 @@ using testing::_;
namespace {
+void IgnoreInitializationStatus(CastInitializationStatus status) {}
+
class VEAFactory {
public:
VEAFactory(const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
@@ -117,7 +119,7 @@ class ExternalVideoEncoderTest : public ::testing::Test {
gfx::Size size(video_config_.width, video_config_.height);
video_frame_ = media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta());
- PopulateVideoFrame(video_frame_, 123);
+ PopulateVideoFrame(video_frame_.get(), 123);
testing_clock_ = new base::SimpleTestTickClock();
task_runner_ = new test::FakeSingleThreadTaskRunner(testing_clock_);
@@ -134,6 +136,7 @@ class ExternalVideoEncoderTest : public ::testing::Test {
video_encoder_.reset(new ExternalVideoEncoder(
cast_environment_,
video_config_,
+ base::Bind(&IgnoreInitializationStatus),
base::Bind(&VEAFactory::CreateVideoEncodeAccelerator,
base::Unretained(&vea_factory)),
base::Bind(&CreateSharedMemory)));
@@ -229,6 +232,7 @@ TEST(ExternalVideoEncoderEarlyDestroyTest, DestroyBeforeVEACreatedCallback) {
scoped_ptr<ExternalVideoEncoder> video_encoder(new ExternalVideoEncoder(
cast_environment,
video_config,
+ base::Bind(&IgnoreInitializationStatus),
base::Bind(&VEAFactory::CreateVideoEncodeAccelerator,
base::Unretained(&vea_factory)),
base::Bind(&CreateSharedMemory)));
diff --git a/media/cast/sender/frame_sender.cc b/media/cast/sender/frame_sender.cc
index b531a069fe..0b94f74a11 100644
--- a/media/cast/sender/frame_sender.cc
+++ b/media/cast/sender/frame_sender.cc
@@ -4,29 +4,56 @@
#include "media/cast/sender/frame_sender.h"
+#include "base/debug/trace_event.h"
+
namespace media {
namespace cast {
namespace {
+
const int kMinSchedulingDelayMs = 1;
+const int kNumAggressiveReportsSentAtStart = 100;
+
+// The additional number of frames that can be in-flight when input exceeds the
+// maximum frame rate.
+const int kMaxFrameBurst = 5;
+
} // namespace
+// Convenience macro used in logging statements throughout this file.
+#define SENDER_SSRC (is_audio_ ? "AUDIO[" : "VIDEO[") << ssrc_ << "] "
+
FrameSender::FrameSender(scoped_refptr<CastEnvironment> cast_environment,
+ bool is_audio,
CastTransportSender* const transport_sender,
base::TimeDelta rtcp_interval,
- int frequency,
+ int rtp_timebase,
uint32 ssrc,
double max_frame_rate,
- base::TimeDelta playout_delay)
+ base::TimeDelta min_playout_delay,
+ base::TimeDelta max_playout_delay,
+ CongestionControl* congestion_control)
: cast_environment_(cast_environment),
transport_sender_(transport_sender),
ssrc_(ssrc),
- rtp_timestamp_helper_(frequency),
- rtt_available_(false),
rtcp_interval_(rtcp_interval),
+ min_playout_delay_(min_playout_delay == base::TimeDelta() ?
+ max_playout_delay : min_playout_delay),
+ max_playout_delay_(max_playout_delay),
max_frame_rate_(max_frame_rate),
+ num_aggressive_rtcp_reports_sent_(0),
+ last_sent_frame_id_(0),
+ latest_acked_frame_id_(0),
+ duplicate_ack_counter_(0),
+ congestion_control_(congestion_control),
+ rtp_timebase_(rtp_timebase),
+ is_audio_(is_audio),
weak_factory_(this) {
- SetTargetPlayoutDelay(playout_delay);
+ DCHECK(transport_sender_);
+ DCHECK_GT(rtp_timebase_, 0);
+ DCHECK(congestion_control_);
+ SetTargetPlayoutDelay(min_playout_delay_);
send_target_playout_delay_ = false;
+ memset(frame_rtp_timestamps_, 0, sizeof(frame_rtp_timestamps_));
}
FrameSender::~FrameSender() {
@@ -49,32 +76,40 @@ void FrameSender::ScheduleNextRtcpReport() {
void FrameSender::SendRtcpReport(bool schedule_future_reports) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ // Sanity-check: We should have sent at least the first frame by this point.
+ DCHECK(!last_send_time_.is_null());
+
+ // Create lip-sync info for the sender report. The last sent frame's
+ // reference time and RTP timestamp are used to estimate an RTP timestamp in
+ // terms of "now." Note that |now| is unlikely to land exactly on a frame
+ // boundary, so the computation here will result in a
+ // |now_as_rtp_timestamp| value that is rarely equal to any one emitted by the
+ // encoder.
const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
- uint32 now_as_rtp_timestamp = 0;
- if (rtp_timestamp_helper_.GetCurrentTimeAsRtpTimestamp(
- now, &now_as_rtp_timestamp)) {
- transport_sender_->SendSenderReport(ssrc_, now, now_as_rtp_timestamp);
- } else {
- // |rtp_timestamp_helper_| should have stored a mapping by this point.
- NOTREACHED();
- }
+ const base::TimeDelta time_delta =
+ now - GetRecordedReferenceTime(last_sent_frame_id_);
+ const int64 rtp_delta = TimeDeltaToRtpDelta(time_delta, rtp_timebase_);
+ const uint32 now_as_rtp_timestamp =
+ GetRecordedRtpTimestamp(last_sent_frame_id_) +
+ static_cast<uint32>(rtp_delta);
+ transport_sender_->SendSenderReport(ssrc_, now, now_as_rtp_timestamp);
+
if (schedule_future_reports)
ScheduleNextRtcpReport();
}
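The lip-sync computation above replaces RtpTimestampHelper: it takes the RTP timestamp of the last sent frame and advances it by (now - reference_time) scaled by the RTP timebase. A standalone sketch of that extrapolation, assuming a 90 kHz video timebase for illustration (the real TimeDeltaToRtpDelta() lives elsewhere in Chromium; the helper below is a simplified stand-in):

#include <cstdint>
#include <cstdio>

// Simplified stand-in for TimeDeltaToRtpDelta(): elapsed seconds * timebase.
int64_t RtpDelta(double elapsed_seconds, int rtp_timebase) {
  return static_cast<int64_t>(elapsed_seconds * rtp_timebase);
}

int main() {
  const int kRtpTimebase = 90000;                // 90 kHz video clock
  const uint32_t last_frame_rtp_timestamp = 1'000'000;
  const double seconds_since_last_frame = 0.25;  // "now" - frame reference time
  const uint32_t now_as_rtp_timestamp =
      last_frame_rtp_timestamp +
      static_cast<uint32_t>(RtpDelta(seconds_since_last_frame, kRtpTimebase));
  std::printf("%u\n", static_cast<unsigned>(now_as_rtp_timestamp));  // 1022500
}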
-void FrameSender::OnReceivedRtt(base::TimeDelta rtt,
- base::TimeDelta avg_rtt,
- base::TimeDelta min_rtt,
- base::TimeDelta max_rtt) {
- rtt_available_ = true;
- rtt_ = rtt;
- avg_rtt_ = avg_rtt;
- min_rtt_ = min_rtt;
- max_rtt_ = max_rtt;
+void FrameSender::OnMeasuredRoundTripTime(base::TimeDelta rtt) {
+ DCHECK(rtt > base::TimeDelta());
+ current_round_trip_time_ = rtt;
}
void FrameSender::SetTargetPlayoutDelay(
base::TimeDelta new_target_playout_delay) {
+ new_target_playout_delay = std::max(new_target_playout_delay,
+ min_playout_delay_);
+ new_target_playout_delay = std::min(new_target_playout_delay,
+ max_playout_delay_);
target_playout_delay_ = new_target_playout_delay;
max_unacked_frames_ =
std::min(kMaxUnackedFrames,
@@ -84,5 +119,264 @@ void FrameSender::SetTargetPlayoutDelay(
send_target_playout_delay_ = true;
}
+void FrameSender::ResendCheck() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(!last_send_time_.is_null());
+ const base::TimeDelta time_since_last_send =
+ cast_environment_->Clock()->NowTicks() - last_send_time_;
+ if (time_since_last_send > target_playout_delay_) {
+ if (latest_acked_frame_id_ == last_sent_frame_id_) {
+ // Last frame acked, no point in doing anything
+ } else {
+ VLOG(1) << SENDER_SSRC << "ACK timeout; last acked frame: "
+ << latest_acked_frame_id_;
+ ResendForKickstart();
+ }
+ }
+ ScheduleNextResendCheck();
+}
+
+void FrameSender::ScheduleNextResendCheck() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(!last_send_time_.is_null());
+ base::TimeDelta time_to_next =
+ last_send_time_ - cast_environment_->Clock()->NowTicks() +
+ target_playout_delay_;
+ time_to_next = std::max(
+ time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+ cast_environment_->PostDelayedTask(
+ CastEnvironment::MAIN,
+ FROM_HERE,
+ base::Bind(&FrameSender::ResendCheck, weak_factory_.GetWeakPtr()),
+ time_to_next);
+}
+
+void FrameSender::ResendForKickstart() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(!last_send_time_.is_null());
+ VLOG(1) << SENDER_SSRC << "Resending last packet of frame "
+ << last_sent_frame_id_ << " to kick-start.";
+ last_send_time_ = cast_environment_->Clock()->NowTicks();
+ transport_sender_->ResendFrameForKickstart(ssrc_, last_sent_frame_id_);
+}
+
+void FrameSender::RecordLatestFrameTimestamps(uint32 frame_id,
+ base::TimeTicks reference_time,
+ RtpTimestamp rtp_timestamp) {
+ DCHECK(!reference_time.is_null());
+ frame_reference_times_[frame_id % arraysize(frame_reference_times_)] =
+ reference_time;
+ frame_rtp_timestamps_[frame_id % arraysize(frame_rtp_timestamps_)] =
+ rtp_timestamp;
+}
+
+base::TimeTicks FrameSender::GetRecordedReferenceTime(uint32 frame_id) const {
+ return frame_reference_times_[frame_id % arraysize(frame_reference_times_)];
+}
+
+RtpTimestamp FrameSender::GetRecordedRtpTimestamp(uint32 frame_id) const {
+ return frame_rtp_timestamps_[frame_id % arraysize(frame_rtp_timestamps_)];
+}
+
+int FrameSender::GetUnacknowledgedFrameCount() const {
+ const int count =
+ static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
+ DCHECK_GE(count, 0);
+ return count;
+}
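Both the ring-buffer lookups above and GetUnacknowledgedFrameCount() rely on uint32 frame IDs wrapping safely: indexing uses frame_id % 256, and "how far ahead" is recovered by casting the unsigned difference to int32. A small standalone check of that behavior around the wrap point (illustrative values only):

#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t last_sent = 3;               // frame ID just past the uint32 wrap
  const uint32_t latest_acked = 0xFFFFFFFEu;  // two IDs before the wrap
  // Unsigned subtraction wraps, and the int32 cast turns it back into a small
  // positive distance: 3 - 4294967294 == 5 (mod 2^32).
  const int unacked = static_cast<int32_t>(last_sent - latest_acked);
  std::printf("unacked=%d ring_index=%u\n",
              unacked, static_cast<unsigned>(last_sent % 256u));  // unacked=5 ring_index=3
}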
+
+base::TimeDelta FrameSender::GetAllowedInFlightMediaDuration() const {
+ // The total amount of allowed in-flight media should equal the amount that fits
+ // within the entire playout delay window, plus the amount of time it takes to
+ // receive an ACK from the receiver.
+ // TODO(miu): Research is needed, but there is likely a better formula.
+ return target_playout_delay_ + (current_round_trip_time_ / 2);
+}
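GetAllowedInFlightMediaDuration() is simply the target playout delay plus half the measured round trip: with a 400 ms target delay and a 100 ms RTT, up to 450 ms of media may be in flight. A one-liner using std::chrono in place of base::TimeDelta (illustrative numbers):

#include <chrono>
#include <cstdio>

int main() {
  using std::chrono::milliseconds;
  const milliseconds target_playout_delay(400);
  const milliseconds current_round_trip_time(100);
  const milliseconds allowed = target_playout_delay + current_round_trip_time / 2;
  std::printf("%lld ms\n", static_cast<long long>(allowed.count()));  // 450 ms
}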
+
+void FrameSender::SendEncodedFrame(
+ int requested_bitrate_before_encode,
+ scoped_ptr<EncodedFrame> encoded_frame) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ VLOG(2) << SENDER_SSRC << "About to send another frame: last_sent="
+ << last_sent_frame_id_ << ", latest_acked=" << latest_acked_frame_id_;
+
+ const uint32 frame_id = encoded_frame->frame_id;
+
+ const bool is_first_frame_to_be_sent = last_send_time_.is_null();
+ last_send_time_ = cast_environment_->Clock()->NowTicks();
+ last_sent_frame_id_ = frame_id;
+ // If this is the first frame about to be sent, fake the value of
+ // |latest_acked_frame_id_| to indicate the receiver starts out all caught up.
+ // Also, schedule the periodic frame re-send checks.
+ if (is_first_frame_to_be_sent) {
+ latest_acked_frame_id_ = frame_id - 1;
+ ScheduleNextResendCheck();
+ }
+
+ VLOG_IF(1, !is_audio_ && encoded_frame->dependency == EncodedFrame::KEY)
+ << SENDER_SSRC << "Sending encoded key frame, id=" << frame_id;
+
+ cast_environment_->Logging()->InsertEncodedFrameEvent(
+ last_send_time_, FRAME_ENCODED,
+ is_audio_ ? AUDIO_EVENT : VIDEO_EVENT,
+ encoded_frame->rtp_timestamp,
+ frame_id, static_cast<int>(encoded_frame->data.size()),
+ encoded_frame->dependency == EncodedFrame::KEY,
+ requested_bitrate_before_encode);
+
+ RecordLatestFrameTimestamps(frame_id,
+ encoded_frame->reference_time,
+ encoded_frame->rtp_timestamp);
+
+ if (!is_audio_) {
+ // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
+ TRACE_EVENT_INSTANT1(
+ "cast_perf_test", "VideoFrameEncoded",
+ TRACE_EVENT_SCOPE_THREAD,
+ "rtp_timestamp", encoded_frame->rtp_timestamp);
+ }
+
+ // At the start of the session, it's important to send reports before each
+ // frame so that the receiver can properly compute playout times. More than
+ // one report is sent because transmission is best effort, not guaranteed, so
+ // enough are sent that at least one should almost certainly get through.
+ if (num_aggressive_rtcp_reports_sent_ < kNumAggressiveReportsSentAtStart) {
+ // SendRtcpReport() will schedule future reports to be made if this is the
+ // last "aggressive report."
+ ++num_aggressive_rtcp_reports_sent_;
+ const bool is_last_aggressive_report =
+ (num_aggressive_rtcp_reports_sent_ == kNumAggressiveReportsSentAtStart);
+ VLOG_IF(1, is_last_aggressive_report)
+ << SENDER_SSRC << "Sending last aggressive report.";
+ SendRtcpReport(is_last_aggressive_report);
+ }
+
+ congestion_control_->SendFrameToTransport(
+ frame_id, encoded_frame->data.size() * 8, last_send_time_);
+
+ if (send_target_playout_delay_) {
+ encoded_frame->new_playout_delay_ms =
+ target_playout_delay_.InMilliseconds();
+ }
+ transport_sender_->InsertFrame(ssrc_, *encoded_frame);
+}
+
+void FrameSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ const bool have_valid_rtt = current_round_trip_time_ > base::TimeDelta();
+ if (have_valid_rtt) {
+ congestion_control_->UpdateRtt(current_round_trip_time_);
+
+ // Having the RTT value implies the receiver sent back a receiver report
+ // based on it having received a report from here. Therefore, ensure this
+ // sender stops aggressively sending reports.
+ if (num_aggressive_rtcp_reports_sent_ < kNumAggressiveReportsSentAtStart) {
+ VLOG(1) << SENDER_SSRC
+ << "No longer a need to send reports aggressively (sent "
+ << num_aggressive_rtcp_reports_sent_ << ").";
+ num_aggressive_rtcp_reports_sent_ = kNumAggressiveReportsSentAtStart;
+ ScheduleNextRtcpReport();
+ }
+ }
+
+ if (last_send_time_.is_null())
+ return; // Cannot get an ACK without having first sent a frame.
+
+ if (cast_feedback.missing_frames_and_packets.empty()) {
+ OnAck(cast_feedback.ack_frame_id);
+
+ // We only count duplicate ACKs when we have sent newer frames.
+ if (latest_acked_frame_id_ == cast_feedback.ack_frame_id &&
+ latest_acked_frame_id_ != last_sent_frame_id_) {
+ duplicate_ack_counter_++;
+ } else {
+ duplicate_ack_counter_ = 0;
+ }
+ // TODO(miu): The values "2" and "3" should be derived from configuration.
+ if (duplicate_ack_counter_ >= 2 && duplicate_ack_counter_ % 3 == 2) {
+ VLOG(1) << SENDER_SSRC << "Received duplicate ACK for frame "
+ << latest_acked_frame_id_;
+ ResendForKickstart();
+ }
+ } else {
+ // Only count duplicate ACKs if there is no NACK request in between.
+ // This avoids overly aggressive resends.
+ duplicate_ack_counter_ = 0;
+ }
+
+ base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+ congestion_control_->AckFrame(cast_feedback.ack_frame_id, now);
+
+ cast_environment_->Logging()->InsertFrameEvent(
+ now,
+ FRAME_ACK_RECEIVED,
+ is_audio_ ? AUDIO_EVENT : VIDEO_EVENT,
+ GetRecordedRtpTimestamp(cast_feedback.ack_frame_id),
+ cast_feedback.ack_frame_id);
+
+ const bool is_acked_out_of_order =
+ static_cast<int32>(cast_feedback.ack_frame_id -
+ latest_acked_frame_id_) < 0;
+ VLOG(2) << SENDER_SSRC
+ << "Received ACK" << (is_acked_out_of_order ? " out-of-order" : "")
+ << " for frame " << cast_feedback.ack_frame_id;
+ if (!is_acked_out_of_order) {
+ // Cancel resends of acked frames.
+ std::vector<uint32> cancel_sending_frames;
+ while (latest_acked_frame_id_ != cast_feedback.ack_frame_id) {
+ latest_acked_frame_id_++;
+ cancel_sending_frames.push_back(latest_acked_frame_id_);
+ }
+ transport_sender_->CancelSendingFrames(ssrc_, cancel_sending_frames);
+ latest_acked_frame_id_ = cast_feedback.ack_frame_id;
+ }
+}
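The duplicate-ACK handling above kick-starts a resend on the second duplicate ACK and then every third one after that (counter values 2, 5, 8, ...), but only while newer frames remain unacked. A quick standalone check of which counter values fire, using the same ">= 2 && % 3 == 2" condition:

#include <cstdio>

int main() {
  int duplicate_ack_counter = 0;
  for (int ack = 1; ack <= 10; ++ack) {  // ten duplicate ACKs in a row
    ++duplicate_ack_counter;
    if (duplicate_ack_counter >= 2 && duplicate_ack_counter % 3 == 2)
      std::printf("kick-start resend at duplicate #%d\n", ack);  // #2, #5, #8
  }
}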
+
+bool FrameSender::ShouldDropNextFrame(base::TimeDelta frame_duration) const {
+ // Check that accepting the next frame won't cause more frames to become
+ // in-flight than the system's design limit.
+ const int count_frames_in_flight =
+ GetUnacknowledgedFrameCount() + GetNumberOfFramesInEncoder();
+ if (count_frames_in_flight >= kMaxUnackedFrames) {
+ VLOG(1) << SENDER_SSRC << "Dropping: Too many frames would be in-flight.";
+ return true;
+ }
+
+ // Check that accepting the next frame won't exceed the configured maximum
+ // frame rate, allowing for short-term bursts.
+ base::TimeDelta duration_in_flight = GetInFlightMediaDuration();
+ const double max_frames_in_flight =
+ max_frame_rate_ * duration_in_flight.InSecondsF();
+ if (count_frames_in_flight >= max_frames_in_flight + kMaxFrameBurst) {
+ VLOG(1) << SENDER_SSRC << "Dropping: Burst threshold would be exceeded.";
+ return true;
+ }
+
+ // Check that accepting the next frame won't exceed the allowed in-flight
+ // media duration.
+ const base::TimeDelta duration_would_be_in_flight =
+ duration_in_flight + frame_duration;
+ const base::TimeDelta allowed_in_flight = GetAllowedInFlightMediaDuration();
+ if (VLOG_IS_ON(1)) {
+ const int64 percent = allowed_in_flight > base::TimeDelta() ?
+ 100 * duration_would_be_in_flight / allowed_in_flight : kint64max;
+ VLOG_IF(1, percent > 50)
+ << SENDER_SSRC
+ << duration_in_flight.InMicroseconds() << " usec in-flight + "
+ << frame_duration.InMicroseconds() << " usec for next frame --> "
+ << percent << "% of allowed in-flight.";
+ }
+ if (duration_would_be_in_flight > allowed_in_flight) {
+ VLOG(1) << SENDER_SSRC << "Dropping: In-flight duration would be too high.";
+ return true;
+ }
+
+ // Next frame is accepted.
+ return false;
+}
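To make the burst check above concrete: with max_frame_rate_ = 30 and 200 ms of media already in flight, max_frames_in_flight is 6, so the next frame is dropped once 6 + kMaxFrameBurst = 11 frames are outstanding. A standalone sketch of just that middle check (illustrative numbers):

#include <cstdio>

int main() {
  const double max_frame_rate = 30.0;
  const double duration_in_flight_seconds = 0.200;  // 200 ms already in flight
  const int kMaxFrameBurst = 5;
  const int frames_in_flight = 11;                   // sent-but-unacked + in encoder
  const double max_frames_in_flight =
      max_frame_rate * duration_in_flight_seconds;   // 6 frames
  const bool drop = frames_in_flight >= max_frames_in_flight + kMaxFrameBurst;
  std::printf("drop=%s\n", drop ? "true" : "false");  // true: 11 >= 6 + 5
}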
+
} // namespace cast
} // namespace media
diff --git a/media/cast/sender/frame_sender.h b/media/cast/sender/frame_sender.h
index 5da3927961..0e8595d213 100644
--- a/media/cast/sender/frame_sender.h
+++ b/media/cast/sender/frame_sender.h
@@ -15,7 +15,7 @@
#include "base/time/time.h"
#include "media/cast/cast_environment.h"
#include "media/cast/net/rtcp/rtcp.h"
-#include "media/cast/sender/rtp_timestamp_helper.h"
+#include "media/cast/sender/congestion_control.h"
namespace media {
namespace cast {
@@ -23,14 +23,19 @@ namespace cast {
class FrameSender {
public:
FrameSender(scoped_refptr<CastEnvironment> cast_environment,
+ bool is_audio,
CastTransportSender* const transport_sender,
base::TimeDelta rtcp_interval,
- int frequency,
+ int rtp_timebase,
uint32 ssrc,
double max_frame_rate,
- base::TimeDelta playout_delay);
+ base::TimeDelta min_playout_delay,
+ base::TimeDelta max_playout_delay,
+ CongestionControl* congestion_control);
virtual ~FrameSender();
+ int rtp_timebase() const { return rtp_timebase_; }
+
// Calling this function is only valid if the receiver supports the
// "extra_playout_delay", rtp extension.
void SetTargetPlayoutDelay(base::TimeDelta new_target_playout_delay);
@@ -39,17 +44,27 @@ class FrameSender {
return target_playout_delay_;
}
+ // Called by the encoder with the next EncodedFrame to send.
+ void SendEncodedFrame(int requested_bitrate_before_encode,
+ scoped_ptr<EncodedFrame> encoded_frame);
+
+ protected:
+ // Returns the number of frames in the encoder's backlog.
+ virtual int GetNumberOfFramesInEncoder() const = 0;
+
+ // Returns the duration of the data in the encoder's backlog plus the duration
+ // of sent, unacknowledged frames.
+ virtual base::TimeDelta GetInFlightMediaDuration() const = 0;
+
+ // Called when we get an ACK for a frame.
+ virtual void OnAck(uint32 frame_id) = 0;
+
protected:
// Schedule and execute periodic sending of RTCP report.
void ScheduleNextRtcpReport();
void SendRtcpReport(bool schedule_future_reports);
- void OnReceivedRtt(base::TimeDelta rtt,
- base::TimeDelta avg_rtt,
- base::TimeDelta min_rtt,
- base::TimeDelta max_rtt);
-
- bool is_rtt_available() const { return rtt_available_; }
+ void OnMeasuredRoundTripTime(base::TimeDelta rtt);
const scoped_refptr<CastEnvironment> cast_environment_;
@@ -62,18 +77,36 @@ class FrameSender {
const uint32 ssrc_;
- // Records lip-sync (i.e., mapping of RTP <--> NTP timestamps), and
- // extrapolates this mapping to any other point in time.
- RtpTimestampHelper rtp_timestamp_helper_;
-
- // RTT information from RTCP.
- bool rtt_available_;
- base::TimeDelta rtt_;
- base::TimeDelta avg_rtt_;
- base::TimeDelta min_rtt_;
- base::TimeDelta max_rtt_;
-
protected:
+ // Schedule and execute periodic checks for re-sending packets. If no
+ // acknowledgements have been received for "too long," FrameSender will
+ // speculatively re-send certain packets of an unacked frame to kick-start
+ // re-transmission. This is a last resort tactic to prevent the session from
+ // getting stuck after a long outage.
+ void ScheduleNextResendCheck();
+ void ResendCheck();
+ void ResendForKickstart();
+
+ // Protected for testability.
+ void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback);
+
+ // Returns true if encoding and sending the next frame, with the given
+ // |frame_duration|, would put too many frames in flight.
+ bool ShouldDropNextFrame(base::TimeDelta frame_duration) const;
+
+ // Record or retrieve a recent history of each frame's timestamps.
+ // Warning: If a frame ID too far in the past is requested, the getters will
+ // silently succeed but return incorrect values. Be sure to respect
+ // media::cast::kMaxUnackedFrames.
+ void RecordLatestFrameTimestamps(uint32 frame_id,
+ base::TimeTicks reference_time,
+ RtpTimestamp rtp_timestamp);
+ base::TimeTicks GetRecordedReferenceTime(uint32 frame_id) const;
+ RtpTimestamp GetRecordedRtpTimestamp(uint32 frame_id) const;
+
+ // Returns the number of frames that were sent but not yet acknowledged.
+ int GetUnacknowledgedFrameCount() const;
+
const base::TimeDelta rtcp_interval_;
// The total amount of time between a frame's capture/recording on the sender
@@ -83,6 +116,8 @@ class FrameSender {
// environment (sender/receiver hardware performance, network conditions,
// etc.).
base::TimeDelta target_playout_delay_;
+ base::TimeDelta min_playout_delay_;
+ base::TimeDelta max_playout_delay_;
// If true, we transmit the target playout delay to the receiver.
bool send_target_playout_delay_;
@@ -94,7 +129,58 @@ class FrameSender {
// new frames shall halt.
int max_unacked_frames_;
+ // Counts how many RTCP reports are being "aggressively" sent (i.e., one per
+ // frame) at the start of the session. Once a threshold is reached, RTCP
+ // reports are instead sent at the configured interval + random drift.
+ int num_aggressive_rtcp_reports_sent_;
+
+ // This is "null" until the first frame is sent. Thereafter, this tracks the
+ // last time any frame was sent or re-sent.
+ base::TimeTicks last_send_time_;
+
+ // The ID of the last frame sent. Logic throughout FrameSender assumes this
+ // can safely wrap-around. This member is invalid until
+ // |!last_send_time_.is_null()|.
+ uint32 last_sent_frame_id_;
+
+ // The ID of the latest (not necessarily the last) frame that has been
+ // acknowledged. Logic throughout FrameSender assumes this can safely
+ // wrap-around. This member is invalid until |!last_send_time_.is_null()|.
+ uint32 latest_acked_frame_id_;
+
+ // Counts the number of duplicate ACKs that have been received. When this
+ // number reaches a threshold, the sender takes it as a sign that the
+ // receiver hasn't yet received the first packet of the next frame. In this
+ // case, FrameSender will trigger a re-send of the next frame.
+ int duplicate_ack_counter_;
+
+ // If this sender is ready for use, this is STATUS_AUDIO_INITIALIZED or
+ // STATUS_VIDEO_INITIALIZED.
+ CastInitializationStatus cast_initialization_status_;
+
+ // This object controls how we change the bitrate to make sure the
+ // buffer doesn't overflow.
+ scoped_ptr<CongestionControl> congestion_control_;
+
+ // The most recently measured round trip time.
+ base::TimeDelta current_round_trip_time_;
+
private:
+ // Returns the maximum media duration currently allowed in-flight. This
+ // fluctuates in response to the currently-measured network latency.
+ base::TimeDelta GetAllowedInFlightMediaDuration() const;
+
+ // RTP timestamp increment representing one second.
+ const int rtp_timebase_;
+
+ const bool is_audio_;
+
+ // Ring buffers to keep track of recent frame timestamps (both in terms of
+ // local reference time and RTP media time). These should only be accessed
+ // through the Record/GetXXX() methods.
+ base::TimeTicks frame_reference_times_[256];
+ RtpTimestamp frame_rtp_timestamps_[256];
+
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<FrameSender> weak_factory_;
diff --git a/media/cast/sender/rtp_timestamp_helper.cc b/media/cast/sender/rtp_timestamp_helper.cc
deleted file mode 100644
index ea0c35c66f..0000000000
--- a/media/cast/sender/rtp_timestamp_helper.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/cast/sender/rtp_timestamp_helper.h"
-
-namespace media {
-namespace cast {
-
-RtpTimestampHelper::RtpTimestampHelper(int frequency)
- : frequency_(frequency),
- last_rtp_timestamp_(0) {
-}
-
-RtpTimestampHelper::~RtpTimestampHelper() {
-}
-
-bool RtpTimestampHelper::GetCurrentTimeAsRtpTimestamp(
- const base::TimeTicks& now, uint32* rtp_timestamp) const {
- if (last_capture_time_.is_null())
- return false;
- const base::TimeDelta elapsed_time = now - last_capture_time_;
- const int64 rtp_delta =
- elapsed_time * frequency_ / base::TimeDelta::FromSeconds(1);
- *rtp_timestamp = last_rtp_timestamp_ + static_cast<uint32>(rtp_delta);
- return true;
-}
-
-void RtpTimestampHelper::StoreLatestTime(
- base::TimeTicks capture_time, uint32 rtp_timestamp) {
- last_capture_time_ = capture_time;
- last_rtp_timestamp_ = rtp_timestamp;
-}
-
-} // namespace cast
-} // namespace media
diff --git a/media/cast/sender/rtp_timestamp_helper.h b/media/cast/sender/rtp_timestamp_helper.h
deleted file mode 100644
index 8f56681dac..0000000000
--- a/media/cast/sender/rtp_timestamp_helper.h
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAST_SENDER_RTP_TIMESTAMP_HELPER_H_
-#define MEDIA_CAST_SENDER_RTP_TIMESTAMP_HELPER_H_
-
-#include "base/basictypes.h"
-#include "base/time/time.h"
-
-namespace media {
-namespace cast {
-
-// A helper class used to convert current time ticks into RTP timestamp.
-class RtpTimestampHelper {
- public:
- explicit RtpTimestampHelper(int frequency);
- ~RtpTimestampHelper();
-
- // Compute a RTP timestamp using current time, last encoded time and
- // last encoded RTP timestamp.
- // Return true if |rtp_timestamp| is computed.
- bool GetCurrentTimeAsRtpTimestamp(const base::TimeTicks& now,
- uint32* rtp_timestamp) const;
-
- // Store the capture time and the corresponding RTP timestamp for the
- // last encoded frame.
- void StoreLatestTime(base::TimeTicks capture_time, uint32 rtp_timestamp);
-
- private:
- int frequency_;
- base::TimeTicks last_capture_time_;
- uint32 last_rtp_timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(RtpTimestampHelper);
-};
-
-} // namespace cast
-} // namespace media
-
-#endif // MEDIA_CAST_SENDER_RTP_TIMESTAMP_HELPER_H_
diff --git a/media/cast/sender/video_encoder_impl.cc b/media/cast/sender/video_encoder_impl.cc
index d21649784e..4cbb769f0d 100644
--- a/media/cast/sender/video_encoder_impl.cc
+++ b/media/cast/sender/video_encoder_impl.cc
@@ -12,13 +12,16 @@
#include "media/base/video_frame.h"
#include "media/cast/cast_defines.h"
#include "media/cast/sender/fake_software_video_encoder.h"
+#if !defined(MEDIA_DISABLE_LIBVPX)
#include "media/cast/sender/vp8_encoder.h"
+#endif // !defined(MEDIA_DISABLE_LIBVPX)
namespace media {
namespace cast {
namespace {
+#if !defined(MEDIA_DISABLE_LIBVPX)
typedef base::Callback<void(Vp8Encoder*)> PassEncoderCallback;
void InitializeEncoderOnEncoderThread(
@@ -27,6 +30,7 @@ void InitializeEncoderOnEncoderThread(
DCHECK(environment->CurrentlyOn(CastEnvironment::VIDEO));
encoder->Initialize();
}
+#endif // !defined(MEDIA_DISABLE_LIBVPX)
void EncodeVideoFrameOnEncoderThread(
scoped_refptr<CastEnvironment> environment,
@@ -70,12 +74,14 @@ VideoEncoderImpl::VideoEncoderImpl(
int max_unacked_frames)
: cast_environment_(cast_environment) {
if (video_config.codec == CODEC_VIDEO_VP8) {
+#if !defined(MEDIA_DISABLE_LIBVPX)
encoder_.reset(new Vp8Encoder(video_config, max_unacked_frames));
cast_environment_->PostTask(CastEnvironment::VIDEO,
FROM_HERE,
base::Bind(&InitializeEncoderOnEncoderThread,
cast_environment,
encoder_.get()));
+#endif // !defined(MEDIA_DISABLE_LIBVPX)
#ifndef OFFICIAL_BUILD
} else if (video_config.codec == CODEC_VIDEO_FAKE) {
encoder_.reset(new FakeSoftwareVideoEncoder(video_config));
diff --git a/media/cast/sender/video_encoder_impl_unittest.cc b/media/cast/sender/video_encoder_impl_unittest.cc
index 190ca2aacc..43f7366883 100644
--- a/media/cast/sender/video_encoder_impl_unittest.cc
+++ b/media/cast/sender/video_encoder_impl_unittest.cc
@@ -12,6 +12,7 @@
#include "media/cast/cast_environment.h"
#include "media/cast/sender/video_encoder_impl.h"
#include "media/cast/test/fake_single_thread_task_runner.h"
+#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/video_utility.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -24,7 +25,13 @@ namespace {
class TestVideoEncoderCallback
: public base::RefCountedThreadSafe<TestVideoEncoderCallback> {
public:
- TestVideoEncoderCallback() {}
+ explicit TestVideoEncoderCallback(bool multiple_buffer_mode)
+ : multiple_buffer_mode_(multiple_buffer_mode),
+ count_frames_delivered_(0) {}
+
+ int count_frames_delivered() const {
+ return count_frames_delivered_;
+ }
void SetExpectedResult(uint32 expected_frame_id,
uint32 expected_last_referenced_frame_id,
@@ -36,23 +43,27 @@ class TestVideoEncoderCallback
void DeliverEncodedVideoFrame(
scoped_ptr<EncodedFrame> encoded_frame) {
- if (expected_frame_id_ == expected_last_referenced_frame_id_) {
+ if (expected_frame_id_ != expected_last_referenced_frame_id_) {
+ EXPECT_EQ(EncodedFrame::DEPENDENT, encoded_frame->dependency);
+ } else if (!multiple_buffer_mode_) {
EXPECT_EQ(EncodedFrame::KEY, encoded_frame->dependency);
- } else {
- EXPECT_EQ(EncodedFrame::DEPENDENT,
- encoded_frame->dependency);
}
EXPECT_EQ(expected_frame_id_, encoded_frame->frame_id);
EXPECT_EQ(expected_last_referenced_frame_id_,
- encoded_frame->referenced_frame_id);
+ encoded_frame->referenced_frame_id)
+ << "frame id: " << expected_frame_id_;
+ EXPECT_LT(0u, encoded_frame->rtp_timestamp);
EXPECT_EQ(expected_capture_time_, encoded_frame->reference_time);
+ EXPECT_FALSE(encoded_frame->data.empty());
+ ++count_frames_delivered_;
}
- protected:
- virtual ~TestVideoEncoderCallback() {}
-
private:
friend class base::RefCountedThreadSafe<TestVideoEncoderCallback>;
+ virtual ~TestVideoEncoderCallback() {}
+
+ const bool multiple_buffer_mode_;
+ int count_frames_delivered_;
uint32 expected_frame_id_;
uint32 expected_last_referenced_frame_id_;
@@ -64,32 +75,20 @@ class TestVideoEncoderCallback
class VideoEncoderImplTest : public ::testing::Test {
protected:
- VideoEncoderImplTest()
- : test_video_encoder_callback_(new TestVideoEncoderCallback()) {
- video_config_.ssrc = 1;
- video_config_.incoming_feedback_ssrc = 2;
- video_config_.rtp_payload_type = 127;
- video_config_.use_external_encoder = false;
- video_config_.width = 320;
- video_config_.height = 240;
- video_config_.max_bitrate = 5000000;
- video_config_.min_bitrate = 1000000;
- video_config_.start_bitrate = 2000000;
- video_config_.max_qp = 56;
- video_config_.min_qp = 0;
- video_config_.max_frame_rate = 30;
- video_config_.max_number_of_video_buffers_used = 3;
+ VideoEncoderImplTest() {
+ video_config_ = GetDefaultVideoSenderConfig();
video_config_.codec = CODEC_VIDEO_VP8;
gfx::Size size(video_config_.width, video_config_.height);
video_frame_ = media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta());
- PopulateVideoFrame(video_frame_, 123);
+ PopulateVideoFrame(video_frame_.get(), 123);
}
virtual ~VideoEncoderImplTest() {}
virtual void SetUp() OVERRIDE {
testing_clock_ = new base::SimpleTestTickClock();
+ testing_clock_->Advance(base::TimeTicks::Now() - base::TimeTicks());
task_runner_ = new test::FakeSingleThreadTaskRunner(testing_clock_);
cast_environment_ =
new CastEnvironment(scoped_ptr<base::TickClock>(testing_clock_).Pass(),
@@ -103,9 +102,12 @@ class VideoEncoderImplTest : public ::testing::Test {
task_runner_->RunTasks();
}
- void Configure(int max_unacked_frames) {
+ void CreateEncoder() {
+ test_video_encoder_callback_ = new TestVideoEncoderCallback(
+ video_config_.max_number_of_video_buffers_used != 1);
video_encoder_.reset(new VideoEncoderImpl(
- cast_environment_, video_config_, max_unacked_frames));
+ cast_environment_, video_config_,
+ 0 /* useless arg to be removed in later change */));
}
base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment.
@@ -120,140 +122,78 @@ class VideoEncoderImplTest : public ::testing::Test {
DISALLOW_COPY_AND_ASSIGN(VideoEncoderImplTest);
};
-TEST_F(VideoEncoderImplTest, EncodePattern30fpsRunningOutOfAck) {
- Configure(3);
+TEST_F(VideoEncoderImplTest, GeneratesKeyFrameThenOnlyDeltaFrames) {
+ CreateEncoder();
VideoEncoder::FrameEncodedCallback frame_encoded_callback =
base::Bind(&TestVideoEncoderCallback::DeliverEncodedVideoFrame,
test_video_encoder_callback_.get());
- base::TimeTicks capture_time;
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(0, 0, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
+ EXPECT_EQ(0, test_video_encoder_callback_->count_frames_delivered());
- capture_time += base::TimeDelta::FromMilliseconds(33);
- video_encoder_->LatestFrameIdToReference(0);
- test_video_encoder_callback_->SetExpectedResult(1, 0, capture_time);
+ test_video_encoder_callback_->SetExpectedResult(
+ 0, 0, testing_clock_->NowTicks());
EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
+ video_frame_, testing_clock_->NowTicks(), frame_encoded_callback));
task_runner_->RunTasks();
- capture_time += base::TimeDelta::FromMilliseconds(33);
- video_encoder_->LatestFrameIdToReference(1);
- test_video_encoder_callback_->SetExpectedResult(2, 1, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
-
- video_encoder_->LatestFrameIdToReference(2);
-
- for (int i = 3; i < 6; ++i) {
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(i, 2, capture_time);
+ for (uint32 frame_id = 1; frame_id < 10; ++frame_id) {
+ testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33));
+ test_video_encoder_callback_->SetExpectedResult(
+ frame_id, frame_id - 1, testing_clock_->NowTicks());
EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
+ video_frame_, testing_clock_->NowTicks(), frame_encoded_callback));
task_runner_->RunTasks();
}
-}
-
-// TODO(pwestin): Re-enabled after redesign the encoder to control number of
-// frames in flight.
-TEST_F(VideoEncoderImplTest, DISABLED_EncodePattern60fpsRunningOutOfAck) {
- video_config_.max_number_of_video_buffers_used = 1;
- Configure(6);
-
- base::TimeTicks capture_time;
- VideoEncoder::FrameEncodedCallback frame_encoded_callback =
- base::Bind(&TestVideoEncoderCallback::DeliverEncodedVideoFrame,
- test_video_encoder_callback_.get());
-
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(0, 0, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
-
- video_encoder_->LatestFrameIdToReference(0);
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(1, 0, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
-
- video_encoder_->LatestFrameIdToReference(1);
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(2, 0, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
-
- video_encoder_->LatestFrameIdToReference(2);
- for (int i = 3; i < 9; ++i) {
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(i, 2, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
- }
+ EXPECT_EQ(10, test_video_encoder_callback_->count_frames_delivered());
}
-// TODO(pwestin): Re-enabled after redesign the encoder to control number of
-// frames in flight.
TEST_F(VideoEncoderImplTest,
- DISABLED_EncodePattern60fps200msDelayRunningOutOfAck) {
- Configure(12);
+ FramesDoNotDependOnUnackedFramesInMultiBufferMode) {
+ video_config_.max_number_of_video_buffers_used = 3;
+ CreateEncoder();
- base::TimeTicks capture_time;
VideoEncoder::FrameEncodedCallback frame_encoded_callback =
base::Bind(&TestVideoEncoderCallback::DeliverEncodedVideoFrame,
test_video_encoder_callback_.get());
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(0, 0, capture_time);
+ EXPECT_EQ(0, test_video_encoder_callback_->count_frames_delivered());
+
+ test_video_encoder_callback_->SetExpectedResult(
+ 0, 0, testing_clock_->NowTicks());
EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
+ video_frame_, testing_clock_->NowTicks(), frame_encoded_callback));
task_runner_->RunTasks();
+ testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33));
video_encoder_->LatestFrameIdToReference(0);
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(1, 0, capture_time);
+ test_video_encoder_callback_->SetExpectedResult(
+ 1, 0, testing_clock_->NowTicks());
EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
+ video_frame_, testing_clock_->NowTicks(), frame_encoded_callback));
task_runner_->RunTasks();
+ testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33));
video_encoder_->LatestFrameIdToReference(1);
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(2, 0, capture_time);
+ test_video_encoder_callback_->SetExpectedResult(
+ 2, 1, testing_clock_->NowTicks());
EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
+ video_frame_, testing_clock_->NowTicks(), frame_encoded_callback));
task_runner_->RunTasks();
video_encoder_->LatestFrameIdToReference(2);
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(3, 0, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
- video_encoder_->LatestFrameIdToReference(3);
- capture_time += base::TimeDelta::FromMilliseconds(33);
- test_video_encoder_callback_->SetExpectedResult(4, 0, capture_time);
- EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
- task_runner_->RunTasks();
-
- video_encoder_->LatestFrameIdToReference(4);
-
- for (int i = 5; i < 17; ++i) {
- test_video_encoder_callback_->SetExpectedResult(i, 4, capture_time);
+ for (uint32 frame_id = 3; frame_id < 10; ++frame_id) {
+ testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33));
+ test_video_encoder_callback_->SetExpectedResult(
+ frame_id, 2, testing_clock_->NowTicks());
EXPECT_TRUE(video_encoder_->EncodeVideoFrame(
- video_frame_, capture_time, frame_encoded_callback));
+ video_frame_, testing_clock_->NowTicks(), frame_encoded_callback));
task_runner_->RunTasks();
}
+
+ EXPECT_EQ(10, test_video_encoder_callback_->count_frames_delivered());
}
} // namespace cast
diff --git a/media/cast/sender/video_sender.cc b/media/cast/sender/video_sender.cc
index f0e3a1440c..784e8c6a34 100644
--- a/media/cast/sender/video_sender.cc
+++ b/media/cast/sender/video_sender.cc
@@ -19,49 +19,50 @@
namespace media {
namespace cast {
-const int kNumAggressiveReportsSentAtStart = 100;
-const int kMinSchedulingDelayMs = 1;
-
namespace {
-// Returns a fixed bitrate value when external video encoder is used.
-// Some hardware encoder shows bad behavior if we set the bitrate too
-// frequently, e.g. quality drop, not abiding by target bitrate, etc.
-// See details: crbug.com/392086.
-size_t GetFixedBitrate(const VideoSenderConfig& video_config) {
- if (!video_config.use_external_encoder)
- return 0;
- return (video_config.min_bitrate + video_config.max_bitrate) / 2;
-}
+// The following two constants are used to adjust the target
+// playout delay (when allowed). They were calculated using
+// a combination of cast_benchmark runs and manual testing.
+//
+// This is how many round trips we think we need on the network.
+const int kRoundTripsNeeded = 4;
+// This is an estimate of all the the constant time needed independent of
+// network quality (e.g., additional time that accounts for encode and decode
+// time).
+const int kConstantTimeMs = 75;
} // namespace
+// Note: a fixed bitrate value is used when an external video encoder is in
+// use. Some hardware encoders show bad behavior if we set the bitrate too
+// frequently, e.g. quality drops, not abiding by the target bitrate, etc.
+// See details: crbug.com/392086.
VideoSender::VideoSender(
scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config,
+ const CastInitializationCallback& initialization_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb,
- CastTransportSender* const transport_sender)
+ CastTransportSender* const transport_sender,
+ const PlayoutDelayChangeCB& playout_delay_change_cb)
: FrameSender(
cast_environment,
+ false,
transport_sender,
base::TimeDelta::FromMilliseconds(video_config.rtcp_interval),
kVideoFrequency,
video_config.ssrc,
video_config.max_frame_rate,
- video_config.target_playout_delay),
- fixed_bitrate_(GetFixedBitrate(video_config)),
- num_aggressive_rtcp_reports_sent_(0),
+ video_config.min_playout_delay,
+ video_config.max_playout_delay,
+ NewFixedCongestionControl(
+ (video_config.min_bitrate + video_config.max_bitrate) / 2)),
frames_in_encoder_(0),
- last_sent_frame_id_(0),
- latest_acked_frame_id_(0),
- duplicate_ack_counter_(0),
- congestion_control_(cast_environment->Clock(),
- video_config.max_bitrate,
- video_config.min_bitrate,
- max_unacked_frames_),
- cast_initialization_status_(STATUS_VIDEO_UNINITIALIZED),
+ last_bitrate_(0),
+ playout_delay_change_cb_(playout_delay_change_cb),
weak_factory_(this) {
+ cast_initialization_status_ = STATUS_VIDEO_UNINITIALIZED;
VLOG(1) << "max_unacked_frames is " << max_unacked_frames_
<< " for target_playout_delay="
<< target_playout_delay_.InMilliseconds() << " ms"
@@ -69,21 +70,36 @@ VideoSender::VideoSender(
DCHECK_GT(max_unacked_frames_, 0);
if (video_config.use_external_encoder) {
- video_encoder_.reset(new ExternalVideoEncoder(cast_environment,
- video_config,
- create_vea_cb,
- create_video_encode_mem_cb));
+ video_encoder_.reset(new ExternalVideoEncoder(
+ cast_environment,
+ video_config,
+ base::Bind(&VideoSender::OnEncoderInitialized,
+ weak_factory_.GetWeakPtr(), initialization_cb),
+ create_vea_cb,
+ create_video_encode_mem_cb));
} else {
+ // Software encoder is initialized immediately.
+ congestion_control_.reset(
+ NewAdaptiveCongestionControl(cast_environment->Clock(),
+ video_config.max_bitrate,
+ video_config.min_bitrate,
+ max_unacked_frames_));
video_encoder_.reset(new VideoEncoderImpl(
cast_environment, video_config, max_unacked_frames_));
+ cast_initialization_status_ = STATUS_VIDEO_INITIALIZED;
+ }
+
+ if (cast_initialization_status_ == STATUS_VIDEO_INITIALIZED) {
+ cast_environment->PostTask(
+ CastEnvironment::MAIN,
+ FROM_HERE,
+ base::Bind(initialization_cb, cast_initialization_status_));
}
- cast_initialization_status_ = STATUS_VIDEO_INITIALIZED;
media::cast::CastTransportRtpConfig transport_config;
transport_config.ssrc = video_config.ssrc;
transport_config.feedback_ssrc = video_config.incoming_feedback_ssrc;
transport_config.rtp_payload_type = video_config.rtp_payload_type;
- transport_config.stored_frames = max_unacked_frames_;
transport_config.aes_key = video_config.aes_key;
transport_config.aes_iv_mask = video_config.aes_iv_mask;
@@ -91,9 +107,8 @@ VideoSender::VideoSender(
transport_config,
base::Bind(&VideoSender::OnReceivedCastFeedback,
weak_factory_.GetWeakPtr()),
- base::Bind(&VideoSender::OnReceivedRtt, weak_factory_.GetWeakPtr()));
-
- memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
+ base::Bind(&VideoSender::OnMeasuredRoundTripTime,
+ weak_factory_.GetWeakPtr()));
}
VideoSender::~VideoSender() {
@@ -126,239 +141,95 @@ void VideoSender::InsertRawVideoFrame(
"timestamp", capture_time.ToInternalValue(),
"rtp_timestamp", rtp_timestamp);
- if (AreTooManyFramesInFlight()) {
- VLOG(1) << "Dropping frame due to too many frames currently in-flight.";
+ // Drop the frame if its reference timestamp is not an increase over the last
+ // frame's. This protects: 1) the duration calculations that assume
+ // timestamps are monotonically non-decreasing, and 2) assumptions made deeper
+ // in the implementation where each frame's RTP timestamp needs to be unique.
+ if (!last_enqueued_frame_reference_time_.is_null() &&
+ capture_time <= last_enqueued_frame_reference_time_) {
+ VLOG(1) << "Dropping video frame: Reference time did not increase.";
+ return;
+ }
+
+ // Two video frames are needed to compute the exact media duration added by
+ // the next frame. If there are no frames in the encoder, compute a guess
+ // based on the configured |max_frame_rate_|. Any error introduced by this
+ // guess will be eliminated when |duration_in_encoder_| is updated in
+ // OnEncodedVideoFrame().
+ const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ?
+ capture_time - last_enqueued_frame_reference_time_ :
+ base::TimeDelta::FromSecondsD(1.0 / max_frame_rate_);
+
+ if (ShouldDropNextFrame(duration_added_by_next_frame)) {
+ base::TimeDelta new_target_delay = std::min(
+ current_round_trip_time_ * kRoundTripsNeeded +
+ base::TimeDelta::FromMilliseconds(kConstantTimeMs),
+ max_playout_delay_);
+ if (new_target_delay > target_playout_delay_) {
+ VLOG(1) << "New target delay: " << new_target_delay.InMilliseconds();
+ playout_delay_change_cb_.Run(new_target_delay);
+ }
return;
}
- uint32 bitrate = fixed_bitrate_;
- if (!bitrate) {
- bitrate = congestion_control_.GetBitrate(
+ uint32 bitrate = congestion_control_->GetBitrate(
capture_time + target_playout_delay_, target_playout_delay_);
- DCHECK(bitrate);
- video_encoder_->SetBitRate(bitrate);
- } else if (last_send_time_.is_null()) {
- // Set the fixed bitrate value to codec until a frame is sent. We might
- // set this value a couple times at the very beginning of the stream but
- // it is not harmful.
+ if (bitrate != last_bitrate_) {
video_encoder_->SetBitRate(bitrate);
+ last_bitrate_ = bitrate;
}
if (video_encoder_->EncodeVideoFrame(
video_frame,
capture_time,
- base::Bind(&VideoSender::SendEncodedVideoFrame,
+ base::Bind(&VideoSender::OnEncodedVideoFrame,
weak_factory_.GetWeakPtr(),
bitrate))) {
frames_in_encoder_++;
+ duration_in_encoder_ += duration_added_by_next_frame;
+ last_enqueued_frame_reference_time_ = capture_time;
} else {
VLOG(1) << "Encoder rejected a frame. Skipping...";
}
}
-void VideoSender::SendEncodedVideoFrame(
- int requested_bitrate_before_encode,
- scoped_ptr<EncodedFrame> encoded_frame) {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-
- DCHECK_GT(frames_in_encoder_, 0);
- frames_in_encoder_--;
-
- const uint32 frame_id = encoded_frame->frame_id;
-
- const bool is_first_frame_to_be_sent = last_send_time_.is_null();
- last_send_time_ = cast_environment_->Clock()->NowTicks();
- last_sent_frame_id_ = frame_id;
- // If this is the first frame about to be sent, fake the value of
- // |latest_acked_frame_id_| to indicate the receiver starts out all caught up.
- // Also, schedule the periodic frame re-send checks.
- if (is_first_frame_to_be_sent) {
- latest_acked_frame_id_ = frame_id - 1;
- ScheduleNextResendCheck();
- }
-
- VLOG_IF(1, encoded_frame->dependency == EncodedFrame::KEY)
- << "Send encoded key frame; frame_id: " << frame_id;
-
- cast_environment_->Logging()->InsertEncodedFrameEvent(
- last_send_time_, FRAME_ENCODED, VIDEO_EVENT, encoded_frame->rtp_timestamp,
- frame_id, static_cast<int>(encoded_frame->data.size()),
- encoded_frame->dependency == EncodedFrame::KEY,
- requested_bitrate_before_encode);
- // Only use lowest 8 bits as key.
- frame_id_to_rtp_timestamp_[frame_id & 0xff] = encoded_frame->rtp_timestamp;
-
- // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
- TRACE_EVENT_INSTANT1(
- "cast_perf_test", "VideoFrameEncoded",
- TRACE_EVENT_SCOPE_THREAD,
- "rtp_timestamp", encoded_frame->rtp_timestamp);
-
- DCHECK(!encoded_frame->reference_time.is_null());
- rtp_timestamp_helper_.StoreLatestTime(encoded_frame->reference_time,
- encoded_frame->rtp_timestamp);
-
- // At the start of the session, it's important to send reports before each
- // frame so that the receiver can properly compute playout times. The reason
- // more than one report is sent is because transmission is not guaranteed,
- // only best effort, so send enough that one should almost certainly get
- // through.
- if (num_aggressive_rtcp_reports_sent_ < kNumAggressiveReportsSentAtStart) {
- // SendRtcpReport() will schedule future reports to be made if this is the
- // last "aggressive report."
- ++num_aggressive_rtcp_reports_sent_;
- const bool is_last_aggressive_report =
- (num_aggressive_rtcp_reports_sent_ == kNumAggressiveReportsSentAtStart);
- VLOG_IF(1, is_last_aggressive_report) << "Sending last aggressive report.";
- SendRtcpReport(is_last_aggressive_report);
- }
-
- congestion_control_.SendFrameToTransport(
- frame_id, encoded_frame->data.size() * 8, last_send_time_);
+int VideoSender::GetNumberOfFramesInEncoder() const {
+ return frames_in_encoder_;
+}
- if (send_target_playout_delay_) {
- encoded_frame->new_playout_delay_ms =
- target_playout_delay_.InMilliseconds();
+base::TimeDelta VideoSender::GetInFlightMediaDuration() const {
+ if (GetUnacknowledgedFrameCount() > 0) {
+ const uint32 oldest_unacked_frame_id = latest_acked_frame_id_ + 1;
+ return last_enqueued_frame_reference_time_ -
+ GetRecordedReferenceTime(oldest_unacked_frame_id);
+ } else {
+ return duration_in_encoder_;
}
- transport_sender_->InsertCodedVideoFrame(*encoded_frame);
}
-void VideoSender::ResendCheck() {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- DCHECK(!last_send_time_.is_null());
- const base::TimeDelta time_since_last_send =
- cast_environment_->Clock()->NowTicks() - last_send_time_;
- if (time_since_last_send > target_playout_delay_) {
- if (latest_acked_frame_id_ == last_sent_frame_id_) {
- // Last frame acked, no point in doing anything
- } else {
- VLOG(1) << "ACK timeout; last acked frame: " << latest_acked_frame_id_;
- ResendForKickstart();
- }
- }
- ScheduleNextResendCheck();
+void VideoSender::OnAck(uint32 frame_id) {
+ video_encoder_->LatestFrameIdToReference(frame_id);
}
-void VideoSender::ScheduleNextResendCheck() {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- DCHECK(!last_send_time_.is_null());
- base::TimeDelta time_to_next =
- last_send_time_ - cast_environment_->Clock()->NowTicks() +
- target_playout_delay_;
- time_to_next = std::max(
- time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
- cast_environment_->PostDelayedTask(
- CastEnvironment::MAIN,
- FROM_HERE,
- base::Bind(&VideoSender::ResendCheck, weak_factory_.GetWeakPtr()),
- time_to_next);
+void VideoSender::OnEncoderInitialized(
+ const CastInitializationCallback& initialization_cb,
+ CastInitializationStatus status) {
+ cast_initialization_status_ = status;
+ initialization_cb.Run(status);
}
-void VideoSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
+void VideoSender::OnEncodedVideoFrame(
+ int encoder_bitrate,
+ scoped_ptr<EncodedFrame> encoded_frame) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- base::TimeDelta rtt;
- base::TimeDelta avg_rtt;
- base::TimeDelta min_rtt;
- base::TimeDelta max_rtt;
- if (is_rtt_available()) {
- rtt = rtt_;
- avg_rtt = avg_rtt_;
- min_rtt = min_rtt_;
- max_rtt = max_rtt_;
-
- congestion_control_.UpdateRtt(rtt);
-
- // Don't use a RTT lower than our average.
- rtt = std::max(rtt, avg_rtt);
-
- // Having the RTT values implies the receiver sent back a receiver report
- // based on it having received a report from here. Therefore, ensure this
- // sender stops aggressively sending reports.
- if (num_aggressive_rtcp_reports_sent_ < kNumAggressiveReportsSentAtStart) {
- VLOG(1) << "No longer a need to send reports aggressively (sent "
- << num_aggressive_rtcp_reports_sent_ << ").";
- num_aggressive_rtcp_reports_sent_ = kNumAggressiveReportsSentAtStart;
- ScheduleNextRtcpReport();
- }
- } else {
- // We have no measured value use default.
- rtt = base::TimeDelta::FromMilliseconds(kStartRttMs);
- }
-
- if (last_send_time_.is_null())
- return; // Cannot get an ACK without having first sent a frame.
-
- if (cast_feedback.missing_frames_and_packets.empty()) {
- video_encoder_->LatestFrameIdToReference(cast_feedback.ack_frame_id);
-
- // We only count duplicate ACKs when we have sent newer frames.
- if (latest_acked_frame_id_ == cast_feedback.ack_frame_id &&
- latest_acked_frame_id_ != last_sent_frame_id_) {
- duplicate_ack_counter_++;
- } else {
- duplicate_ack_counter_ = 0;
- }
- // TODO(miu): The values "2" and "3" should be derived from configuration.
- if (duplicate_ack_counter_ >= 2 && duplicate_ack_counter_ % 3 == 2) {
- VLOG(1) << "Received duplicate ACK for frame " << latest_acked_frame_id_;
- ResendForKickstart();
- }
- } else {
- // Only count duplicated ACKs if there is no NACK request in between.
- // This is to avoid aggresive resend.
- duplicate_ack_counter_ = 0;
- }
-
- base::TimeTicks now = cast_environment_->Clock()->NowTicks();
- congestion_control_.AckFrame(cast_feedback.ack_frame_id, now);
-
- RtpTimestamp rtp_timestamp =
- frame_id_to_rtp_timestamp_[cast_feedback.ack_frame_id & 0xff];
- cast_environment_->Logging()->InsertFrameEvent(now,
- FRAME_ACK_RECEIVED,
- VIDEO_EVENT,
- rtp_timestamp,
- cast_feedback.ack_frame_id);
-
- const bool is_acked_out_of_order =
- static_cast<int32>(cast_feedback.ack_frame_id -
- latest_acked_frame_id_) < 0;
- VLOG(2) << "Received ACK" << (is_acked_out_of_order ? " out-of-order" : "")
- << " for frame " << cast_feedback.ack_frame_id;
- if (!is_acked_out_of_order) {
- // Cancel resends of acked frames.
- std::vector<uint32> cancel_sending_frames;
- while (latest_acked_frame_id_ != cast_feedback.ack_frame_id) {
- latest_acked_frame_id_++;
- cancel_sending_frames.push_back(latest_acked_frame_id_);
- }
- transport_sender_->CancelSendingFrames(ssrc_, cancel_sending_frames);
- latest_acked_frame_id_ = cast_feedback.ack_frame_id;
- }
-}
+ frames_in_encoder_--;
+ DCHECK_GE(frames_in_encoder_, 0);
-bool VideoSender::AreTooManyFramesInFlight() const {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- int frames_in_flight = frames_in_encoder_;
- if (!last_send_time_.is_null()) {
- frames_in_flight +=
- static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
- }
- VLOG(2) << frames_in_flight
- << " frames in flight; last sent: " << last_sent_frame_id_
- << " latest acked: " << latest_acked_frame_id_
- << " frames in encoder: " << frames_in_encoder_;
- return frames_in_flight >= max_unacked_frames_;
-}
+ duration_in_encoder_ =
+ last_enqueued_frame_reference_time_ - encoded_frame->reference_time;
-void VideoSender::ResendForKickstart() {
- DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- DCHECK(!last_send_time_.is_null());
- VLOG(1) << "Resending last packet of frame " << last_sent_frame_id_
- << " to kick-start.";
- last_send_time_ = cast_environment_->Clock()->NowTicks();
- transport_sender_->ResendFrameForKickstart(ssrc_, last_sent_frame_id_);
+ SendEncodedFrame(encoder_bitrate, encoded_frame.Pass());
}
} // namespace cast
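
For reference, the two constants introduced above (kRoundTripsNeeded and
kConstantTimeMs) drive the adaptive target playout delay: when
ShouldDropNextFrame() fires, the sender proposes
min(RTT * kRoundTripsNeeded + kConstantTimeMs, max_playout_delay_) and only
raises the target if the proposal exceeds the current value. A minimal
standalone sketch of that computation, using std::chrono instead of
base::TimeDelta (only the two constant names come from the patch; the rest is
illustrative):

#include <algorithm>
#include <chrono>
#include <cstdio>

// Constant names mirror the patch above; everything else is illustrative.
constexpr int kRoundTripsNeeded = 4;
constexpr int kConstantTimeMs = 75;

std::chrono::milliseconds NewTargetPlayoutDelay(
    std::chrono::milliseconds current_round_trip_time,
    std::chrono::milliseconds max_playout_delay) {
  // Budget a few retransmission round trips plus a fixed allowance for
  // encode/decode time, clamped to the configured maximum delay.
  return std::min(current_round_trip_time * kRoundTripsNeeded +
                      std::chrono::milliseconds(kConstantTimeMs),
                  max_playout_delay);
}

int main() {
  // Example: a 30 ms RTT with an 800 ms cap yields 4 * 30 + 75 = 195 ms.
  const auto delay = NewTargetPlayoutDelay(std::chrono::milliseconds(30),
                                           std::chrono::milliseconds(800));
  std::printf("new target delay: %lld ms\n",
              static_cast<long long>(delay.count()));
  return 0;
}
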
diff --git a/media/cast/sender/video_sender.h b/media/cast/sender/video_sender.h
index ebada4d26a..e7658abeea 100644
--- a/media/cast/sender/video_sender.h
+++ b/media/cast/sender/video_sender.h
@@ -25,6 +25,8 @@ namespace cast {
class CastTransportSender;
class VideoEncoder;
+typedef base::Callback<void(base::TimeDelta)> PlayoutDelayChangeCB;
+
// Not thread safe. Only called from the main cast thread.
// This class owns all objects related to sending video, objects that create RTP
// packets, congestion control, video encoder, parsing and sending of
@@ -37,16 +39,14 @@ class VideoSender : public FrameSender,
public:
VideoSender(scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config,
+ const CastInitializationCallback& initialization_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb,
- CastTransportSender* const transport_sender);
+ CastTransportSender* const transport_sender,
+ const PlayoutDelayChangeCB& playout_delay_change_cb);
virtual ~VideoSender();
- CastInitializationStatus InitializationResult() const {
- return cast_initialization_status_;
- }
-
// Note: It is not guaranteed that |video_frame| will actually be encoded and
// sent, if VideoSender detects too many frames in flight. Therefore, clients
// should be careful about the rate at which this method is called.
@@ -57,78 +57,39 @@ class VideoSender : public FrameSender,
const base::TimeTicks& capture_time);
protected:
- // Protected for testability.
- void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback);
+ virtual int GetNumberOfFramesInEncoder() const OVERRIDE;
+ virtual base::TimeDelta GetInFlightMediaDuration() const OVERRIDE;
+ virtual void OnAck(uint32 frame_id) OVERRIDE;
private:
- // Schedule and execute periodic checks for re-sending packets. If no
- // acknowledgements have been received for "too long," VideoSender will
- // speculatively re-send certain packets of an unacked frame to kick-start
- // re-transmission. This is a last resort tactic to prevent the session from
- // getting stuck after a long outage.
- void ScheduleNextResendCheck();
- void ResendCheck();
- void ResendForKickstart();
-
- // Returns true if there are too many frames in flight, as defined by the
- // configured target playout delay plus simple logic. When this is true,
- // InsertRawVideoFrame() will silenty drop frames instead of sending them to
- // the video encoder.
- bool AreTooManyFramesInFlight() const;
-
- // Called by the |video_encoder_| with the next EncodeFrame to send.
- void SendEncodedVideoFrame(int requested_bitrate_before_encode,
- scoped_ptr<EncodedFrame> encoded_frame);
- // If this value is non zero then a fixed value is used for bitrate.
- // If external video encoder is used then bitrate will be fixed to
- // (min_bitrate + max_bitrate) / 2.
- const size_t fixed_bitrate_;
+ // Called when the encoder is initialized or has failed to initialize.
+ void OnEncoderInitialized(
+ const CastInitializationCallback& initialization_cb,
+ CastInitializationStatus status);
+
+ // Called by the |video_encoder_| with the next EncodedFrame to send.
+ void OnEncodedVideoFrame(int encoder_bitrate,
+ scoped_ptr<EncodedFrame> encoded_frame);
// Encodes media::VideoFrame images into EncodedFrames. Per configuration,
// this will point to either the internal software-based encoder or a proxy to
// a hardware-based encoder.
scoped_ptr<VideoEncoder> video_encoder_;
- // Counts how many RTCP reports are being "aggressively" sent (i.e., one per
- // frame) at the start of the session. Once a threshold is reached, RTCP
- // reports are instead sent at the configured interval + random drift.
- int num_aggressive_rtcp_reports_sent_;
-
- // The number of frames currently being processed in |video_encoder_|.
+ // The number of frames queued for encoding, but not yet sent.
int frames_in_encoder_;
- // This is "null" until the first frame is sent. Thereafter, this tracks the
- // last time any frame was sent or re-sent.
- base::TimeTicks last_send_time_;
-
- // The ID of the last frame sent. Logic throughout VideoSender assumes this
- // can safely wrap-around. This member is invalid until
- // |!last_send_time_.is_null()|.
- uint32 last_sent_frame_id_;
-
- // The ID of the latest (not necessarily the last) frame that has been
- // acknowledged. Logic throughout VideoSender assumes this can safely
- // wrap-around. This member is invalid until |!last_send_time_.is_null()|.
- uint32 latest_acked_frame_id_;
-
- // Counts the number of duplicate ACK that are being received. When this
- // number reaches a threshold, the sender will take this as a sign that the
- // receiver hasn't yet received the first packet of the next frame. In this
- // case, VideoSender will trigger a re-send of the next frame.
- int duplicate_ack_counter_;
-
- // When we get close to the max number of un-acked frames, we set lower
- // the bitrate drastically to ensure that we catch up. Without this we
- // risk getting stuck in a catch-up state forever.
- CongestionControl congestion_control_;
-
- // If this sender is ready for use, this is STATUS_VIDEO_INITIALIZED.
- CastInitializationStatus cast_initialization_status_;
-
- // This is a "good enough" mapping for finding the RTP timestamp associated
- // with a video frame. The key is the lowest 8 bits of frame id (which is
- // what is sent via RTCP). This map is used for logging purposes.
- RtpTimestamp frame_id_to_rtp_timestamp_[256];
+ // The duration of video queued for encoding, but not yet sent.
+ base::TimeDelta duration_in_encoder_;
+
+ // The timestamp of the frame that was last enqueued in |video_encoder_|.
+ base::TimeTicks last_enqueued_frame_reference_time_;
+
+  // Remember the bitrate we last set on the encoder; there is no need to
+  // set it again if we compute the same value.
+ uint32 last_bitrate_;
+
+ PlayoutDelayChangeCB playout_delay_change_cb_;
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<VideoSender> weak_factory_;
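
The members documented above (duration_in_encoder_,
last_enqueued_frame_reference_time_) back the duration-based flow control:
GetInFlightMediaDuration() in video_sender.cc spans from the oldest unacked
frame's reference time to the most recently enqueued frame when anything is
still unacked, and otherwise falls back to the encoder backlog. A compilable
toy model of that bookkeeping (member names loosely follow the patch; the map
container and main() are purely illustrative):

#include <chrono>
#include <cstdint>
#include <map>

// Toy model only; this is not the real VideoSender class.
struct InFlightModel {
  std::chrono::milliseconds duration_in_encoder{0};
  std::chrono::steady_clock::time_point last_enqueued_frame_reference_time;
  uint32_t latest_acked_frame_id = 0;
  uint32_t last_sent_frame_id = 0;
  // Reference time recorded for each frame that has been sent.
  std::map<uint32_t, std::chrono::steady_clock::time_point> reference_times;

  std::chrono::milliseconds GetInFlightMediaDuration() const {
    // With unacked frames outstanding, the in-flight duration spans from the
    // oldest unacked frame's reference time to the newest enqueued frame;
    // otherwise only the encoder backlog counts.
    if (last_sent_frame_id != latest_acked_frame_id) {
      const uint32_t oldest_unacked_frame_id = latest_acked_frame_id + 1;
      return std::chrono::duration_cast<std::chrono::milliseconds>(
          last_enqueued_frame_reference_time -
          reference_times.at(oldest_unacked_frame_id));
    }
    return duration_in_encoder;
  }
};

int main() {
  using std::chrono::milliseconds;
  InFlightModel model;
  const auto t0 = std::chrono::steady_clock::now();
  model.reference_times[1] = t0;  // Frame 1 was sent but not yet acked.
  model.last_enqueued_frame_reference_time = t0 + milliseconds(100);
  model.last_sent_frame_id = 3;
  model.latest_acked_frame_id = 0;
  // 100 ms of media sits between frame 1 and the newest enqueued frame.
  return model.GetInFlightMediaDuration() == milliseconds(100) ? 0 : 1;
}
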
diff --git a/media/cast/sender/video_sender_unittest.cc b/media/cast/sender/video_sender_unittest.cc
index b9620fcdb2..be6594443c 100644
--- a/media/cast/sender/video_sender_unittest.cc
+++ b/media/cast/sender/video_sender_unittest.cc
@@ -51,6 +51,11 @@ void CreateSharedMemory(
callback.Run(shm.Pass());
}
+void SaveInitializationStatus(CastInitializationStatus* out_status,
+ CastInitializationStatus in_status) {
+ *out_status = in_status;
+}
+
class TestPacketSender : public PacketSender {
public:
TestPacketSender()
@@ -90,7 +95,7 @@ class TestPacketSender : public PacketSender {
void SetPause(bool paused) {
paused_ = paused;
- if (!paused && stored_packet_) {
+ if (!paused && stored_packet_.get()) {
SendPacket(stored_packet_, callback_);
callback_.Run();
}
@@ -106,19 +111,24 @@ class TestPacketSender : public PacketSender {
DISALLOW_COPY_AND_ASSIGN(TestPacketSender);
};
+void IgnorePlayoutDelayChanges(base::TimeDelta unused_playout_delay) {
+}
class PeerVideoSender : public VideoSender {
public:
PeerVideoSender(
scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config,
+ const CastInitializationCallback& initialization_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb,
CastTransportSender* const transport_sender)
: VideoSender(cast_environment,
video_config,
+ initialization_cb,
create_vea_cb,
create_video_encode_mem_cb,
- transport_sender) {}
+ transport_sender,
+ base::Bind(&IgnorePlayoutDelayChanges)) {}
using VideoSender::OnReceivedCastFeedback;
};
} // namespace
@@ -140,6 +150,7 @@ class VideoSenderTest : public ::testing::Test {
NULL,
testing_clock_,
dummy_endpoint,
+ make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
BulkRawEventsCallback(),
base::TimeDelta(),
@@ -158,7 +169,10 @@ class VideoSenderTest : public ::testing::Test {
EXPECT_EQ(TRANSPORT_VIDEO_INITIALIZED, status);
}
- void InitEncoder(bool external) {
+  // If |external| is true then an external video encoder (VEA) is used.
+  // |expect_init_success| is true if initialization is expected to succeed.
+ CastInitializationStatus InitEncoder(bool external,
+ bool expect_init_success) {
VideoSenderConfig video_config;
video_config.ssrc = 1;
video_config.incoming_feedback_ssrc = 2;
@@ -174,28 +188,36 @@ class VideoSenderTest : public ::testing::Test {
video_config.max_frame_rate = 30;
video_config.max_number_of_video_buffers_used = 1;
video_config.codec = CODEC_VIDEO_VP8;
+ CastInitializationStatus status = STATUS_VIDEO_UNINITIALIZED;
if (external) {
- scoped_ptr<VideoEncodeAccelerator> fake_vea(
- new test::FakeVideoEncodeAccelerator(task_runner_,
- &stored_bitrates_));
+ test::FakeVideoEncodeAccelerator* fake_vea =
+ new test::FakeVideoEncodeAccelerator(
+ task_runner_, &stored_bitrates_);
+ fake_vea->SetWillInitializationSucceed(expect_init_success);
+ scoped_ptr<VideoEncodeAccelerator> fake_vea_owner(fake_vea);
video_sender_.reset(
new PeerVideoSender(cast_environment_,
video_config,
+ base::Bind(&SaveInitializationStatus,
+ &status),
base::Bind(&CreateVideoEncodeAccelerator,
task_runner_,
- base::Passed(&fake_vea)),
+ base::Passed(&fake_vea_owner)),
base::Bind(&CreateSharedMemory),
transport_sender_.get()));
} else {
video_sender_.reset(
new PeerVideoSender(cast_environment_,
video_config,
+ base::Bind(&SaveInitializationStatus,
+ &status),
CreateDefaultVideoEncodeAcceleratorCallback(),
CreateDefaultVideoEncodeMemoryCallback(),
transport_sender_.get()));
}
- ASSERT_EQ(STATUS_VIDEO_INITIALIZED, video_sender_->InitializationResult());
+ task_runner_->RunTasks();
+ return status;
}
scoped_refptr<media::VideoFrame> GetNewVideoFrame() {
@@ -203,7 +225,7 @@ class VideoSenderTest : public ::testing::Test {
scoped_refptr<media::VideoFrame> video_frame =
media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta());
- PopulateVideoFrame(video_frame, last_pixel_value_++);
+ PopulateVideoFrame(video_frame.get(), last_pixel_value_++);
return video_frame;
}
@@ -212,7 +234,7 @@ class VideoSenderTest : public ::testing::Test {
scoped_refptr<media::VideoFrame> video_frame =
media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta());
- PopulateVideoFrameWithNoise(video_frame);
+ PopulateVideoFrameWithNoise(video_frame.get());
return video_frame;
}
@@ -233,7 +255,7 @@ class VideoSenderTest : public ::testing::Test {
};
TEST_F(VideoSenderTest, BuiltInEncoder) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
const base::TimeTicks capture_time = testing_clock_->NowTicks();
@@ -245,8 +267,7 @@ TEST_F(VideoSenderTest, BuiltInEncoder) {
}
TEST_F(VideoSenderTest, ExternalEncoder) {
- InitEncoder(true);
- task_runner_->RunTasks();
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(true, true));
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
@@ -261,14 +282,19 @@ TEST_F(VideoSenderTest, ExternalEncoder) {
  // A fixed bitrate is used for the external encoder. The bitrate is only
  // set once on the encoder.
EXPECT_EQ(1u, stored_bitrates_.size());
+ video_sender_.reset(NULL);
+ task_runner_->RunTasks();
+}
- // We need to run the task to cleanup the GPU instance.
+TEST_F(VideoSenderTest, ExternalEncoderInitFails) {
+ EXPECT_EQ(STATUS_HW_VIDEO_ENCODER_NOT_SUPPORTED,
+ InitEncoder(true, false));
video_sender_.reset(NULL);
task_runner_->RunTasks();
}
TEST_F(VideoSenderTest, RtcpTimer) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
@@ -292,7 +318,7 @@ TEST_F(VideoSenderTest, RtcpTimer) {
}
TEST_F(VideoSenderTest, ResendTimer) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
@@ -320,7 +346,7 @@ TEST_F(VideoSenderTest, ResendTimer) {
}
TEST_F(VideoSenderTest, LogAckReceivedEvent) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
SimpleEventSubscriber event_subscriber;
cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber);
@@ -352,7 +378,7 @@ TEST_F(VideoSenderTest, LogAckReceivedEvent) {
}
TEST_F(VideoSenderTest, StopSendingInTheAbsenceOfAck) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
// Send a stream of frames and don't ACK; by default we shouldn't have more
// than 4 frames in flight.
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
@@ -398,7 +424,7 @@ TEST_F(VideoSenderTest, StopSendingInTheAbsenceOfAck) {
}
TEST_F(VideoSenderTest, DuplicateAckRetransmit) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_->NowTicks());
RunTasks(33);
@@ -438,7 +464,7 @@ TEST_F(VideoSenderTest, DuplicateAckRetransmit) {
}
TEST_F(VideoSenderTest, DuplicateAckRetransmitDoesNotCancelRetransmits) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_->NowTicks());
RunTasks(33);
@@ -489,7 +515,7 @@ TEST_F(VideoSenderTest, DuplicateAckRetransmitDoesNotCancelRetransmits) {
}
TEST_F(VideoSenderTest, AcksCancelRetransmits) {
- InitEncoder(false);
+ EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true));
transport_.SetPause(true);
scoped_refptr<media::VideoFrame> video_frame = GetLargeNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_->NowTicks());
diff --git a/media/cast/sender/vp8_encoder.cc b/media/cast/sender/vp8_encoder.cc
index b43b5c881e..918b1f4054 100644
--- a/media/cast/sender/vp8_encoder.cc
+++ b/media/cast/sender/vp8_encoder.cc
@@ -17,32 +17,18 @@ namespace cast {
static const uint32 kMinIntra = 300;
-static int ComputeMaxNumOfRepeatedBuffes(int max_unacked_frames) {
- if (max_unacked_frames > kNumberOfVp8VideoBuffers)
- return (max_unacked_frames - 1) / kNumberOfVp8VideoBuffers;
-
- return 0;
-}
-
Vp8Encoder::Vp8Encoder(const VideoSenderConfig& video_config,
int max_unacked_frames)
: cast_config_(video_config),
use_multiple_video_buffers_(
cast_config_.max_number_of_video_buffers_used ==
kNumberOfVp8VideoBuffers),
- max_number_of_repeated_buffers_in_a_row_(
- ComputeMaxNumOfRepeatedBuffes(max_unacked_frames)),
key_frame_requested_(true),
first_frame_received_(false),
last_encoded_frame_id_(kStartFrameId),
- number_of_repeated_buffers_(0) {
- // TODO(pwestin): we need to figure out how to synchronize the acking with the
- // internal state of the encoder, ideally the encoder will tell if we can
- // send another frame.
- DCHECK(!use_multiple_video_buffers_ ||
- max_number_of_repeated_buffers_in_a_row_ == 0)
- << "Invalid config";
-
+ last_acked_frame_id_(kStartFrameId),
+ frame_id_to_reference_(kStartFrameId - 1),
+ undroppable_frames_(0) {
  // VP8 has 3 buffers available for prediction. With
  // max_number_of_video_buffers_used set to 1 we maximize coding efficiency;
  // however, in this mode we cannot skip frames in the receiver to catch up
@@ -71,11 +57,11 @@ void Vp8Encoder::Initialize() {
// pointer will be set during encode. Setting align to 1, as it is
// meaningless (actual memory is not allocated).
raw_image_ = vpx_img_wrap(
- NULL, IMG_FMT_I420, cast_config_.width, cast_config_.height, 1, NULL);
+ NULL, VPX_IMG_FMT_I420, cast_config_.width, cast_config_.height, 1, NULL);
for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
- acked_frame_buffers_[i] = true;
- used_buffers_frame_id_[i] = kStartFrameId;
+ buffer_state_[i].frame_id = kStartFrameId;
+ buffer_state_[i].state = kBufferStartState;
}
InitEncode(cast_config_.number_of_encode_threads);
}
@@ -137,18 +123,18 @@ bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame,
DCHECK(thread_checker_.CalledOnValidThread());
// Image in vpx_image_t format.
// Input image is const. VP8's raw image is not defined as const.
- raw_image_->planes[PLANE_Y] =
+ raw_image_->planes[VPX_PLANE_Y] =
const_cast<uint8*>(video_frame->data(VideoFrame::kYPlane));
- raw_image_->planes[PLANE_U] =
+ raw_image_->planes[VPX_PLANE_U] =
const_cast<uint8*>(video_frame->data(VideoFrame::kUPlane));
- raw_image_->planes[PLANE_V] =
+ raw_image_->planes[VPX_PLANE_V] =
const_cast<uint8*>(video_frame->data(VideoFrame::kVPlane));
raw_image_->stride[VPX_PLANE_Y] = video_frame->stride(VideoFrame::kYPlane);
raw_image_->stride[VPX_PLANE_U] = video_frame->stride(VideoFrame::kUPlane);
raw_image_->stride[VPX_PLANE_V] = video_frame->stride(VideoFrame::kVPlane);
- uint8 latest_frame_id_to_reference;
+ uint32 latest_frame_id_to_reference;
Vp8Buffers buffer_to_update;
vpx_codec_flags_t flags = 0;
if (key_frame_requested_) {
@@ -160,8 +146,7 @@ bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame,
buffer_to_update = kLastBuffer;
} else {
// Reference all acked frames (buffers).
- latest_frame_id_to_reference = GetLatestFrameIdToReference();
- GetCodecReferenceFlags(&flags);
+ latest_frame_id_to_reference = GetCodecReferenceFlags(&flags);
buffer_to_update = GetNextBufferToUpdate();
GetCodecUpdateFlags(buffer_to_update, &flags);
}
@@ -214,6 +199,7 @@ bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame,
// Populate the encoded frame.
encoded_image->frame_id = ++last_encoded_frame_id_;
if (is_key_frame) {
+ // TODO(Hubbe): Replace "dependency" with a "bool is_key_frame".
encoded_image->dependency = EncodedFrame::KEY;
encoded_image->referenced_frame_id = encoded_image->frame_id;
} else {
@@ -228,108 +214,130 @@ bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame,
key_frame_requested_ = false;
for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
- used_buffers_frame_id_[i] = encoded_image->frame_id;
+ buffer_state_[i].state = kBufferSent;
+ buffer_state_[i].frame_id = encoded_image->frame_id;
}
- // We can pick any buffer as last_used_vp8_buffer_ since we update
- // them all.
- last_used_vp8_buffer_ = buffer_to_update;
} else {
if (buffer_to_update != kNoBuffer) {
- acked_frame_buffers_[buffer_to_update] = false;
- used_buffers_frame_id_[buffer_to_update] = encoded_image->frame_id;
- last_used_vp8_buffer_ = buffer_to_update;
+ buffer_state_[buffer_to_update].state = kBufferSent;
+ buffer_state_[buffer_to_update].frame_id = encoded_image->frame_id;
}
}
return true;
}
-void Vp8Encoder::GetCodecReferenceFlags(vpx_codec_flags_t* flags) {
+uint32 Vp8Encoder::GetCodecReferenceFlags(vpx_codec_flags_t* flags) {
if (!use_multiple_video_buffers_)
- return;
+ return last_encoded_frame_id_;
- // We need to reference something.
- DCHECK(acked_frame_buffers_[kAltRefBuffer] ||
- acked_frame_buffers_[kGoldenBuffer] ||
- acked_frame_buffers_[kLastBuffer])
- << "Invalid state";
+ const uint32 kMagicFrameOffset = 512;
+ // We set latest_frame_to_reference to an old frame so that
+ // IsNewerFrameId will work correctly.
+ uint32 latest_frame_to_reference =
+ last_encoded_frame_id_ - kMagicFrameOffset;
- if (!acked_frame_buffers_[kAltRefBuffer]) {
- *flags |= VP8_EFLAG_NO_REF_ARF;
- }
- if (!acked_frame_buffers_[kGoldenBuffer]) {
- *flags |= VP8_EFLAG_NO_REF_GF;
+ // Reference all acked frames.
+ // TODO(hubbe): We may also want to allow references to the
+ // last encoded frame, if that frame was assigned to a buffer.
+ for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
+ if (buffer_state_[i].state == kBufferAcked) {
+ if (IsNewerFrameId(buffer_state_[i].frame_id,
+ latest_frame_to_reference)) {
+ latest_frame_to_reference = buffer_state_[i].frame_id;
+ }
+ } else {
+ switch (i) {
+ case kAltRefBuffer:
+ *flags |= VP8_EFLAG_NO_REF_ARF;
+ break;
+ case kGoldenBuffer:
+ *flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kLastBuffer:
+ *flags |= VP8_EFLAG_NO_REF_LAST;
+ break;
+ }
+ }
}
- if (!acked_frame_buffers_[kLastBuffer]) {
- *flags |= VP8_EFLAG_NO_REF_LAST;
+
+ if (latest_frame_to_reference ==
+ last_encoded_frame_id_ - kMagicFrameOffset) {
+    // We have nothing to reference; this is kind of like a key frame,
+    // but it doesn't reset the buffers.
+ latest_frame_to_reference = last_encoded_frame_id_ + 1;
}
+
+ return latest_frame_to_reference;
}
-uint32 Vp8Encoder::GetLatestFrameIdToReference() {
+Vp8Encoder::Vp8Buffers Vp8Encoder::GetNextBufferToUpdate() {
if (!use_multiple_video_buffers_)
- return last_encoded_frame_id_;
+ return kNoBuffer;
- int latest_frame_id_to_reference = -1;
- if (acked_frame_buffers_[kAltRefBuffer]) {
- latest_frame_id_to_reference = used_buffers_frame_id_[kAltRefBuffer];
- }
- if (acked_frame_buffers_[kGoldenBuffer]) {
- if (latest_frame_id_to_reference == -1) {
- latest_frame_id_to_reference = used_buffers_frame_id_[kGoldenBuffer];
- } else {
- if (IsNewerFrameId(used_buffers_frame_id_[kGoldenBuffer],
- latest_frame_id_to_reference)) {
- latest_frame_id_to_reference = used_buffers_frame_id_[kGoldenBuffer];
- }
+ // The goal here is to make sure that we always keep one ACKed
+ // buffer while trying to get an ACK for a newer buffer as we go.
+ // Here are the rules for which buffer to select for update:
+  // 1. If there is a buffer in state kBufferStartState, use it.
+ // 2. If there is a buffer other than the oldest buffer
+ // which is Acked, use the oldest buffer.
+ // 3. If there are Sent buffers which are older than
+  //    last_acked_frame_id_, use the oldest one.
+ // 4. If all else fails, just overwrite the newest buffer,
+ // but no more than 3 times in a row.
+ // TODO(hubbe): Figure out if 3 is optimal.
+  // Note: rules 1-3 describe cases where there is a "free" buffer
+ // that we can use. Rule 4 describes what happens when there is
+ // no free buffer available.
+
+ // Buffers, sorted from oldest frame to newest.
+ Vp8Encoder::Vp8Buffers buffers[kNumberOfVp8VideoBuffers];
+
+ for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
+ Vp8Encoder::Vp8Buffers buffer = static_cast<Vp8Encoder::Vp8Buffers>(i);
+
+ // Rule 1
+ if (buffer_state_[buffer].state == kBufferStartState) {
+ undroppable_frames_ = 0;
+ return buffer;
}
+ buffers[buffer] = buffer;
}
- if (acked_frame_buffers_[kLastBuffer]) {
- if (latest_frame_id_to_reference == -1) {
- latest_frame_id_to_reference = used_buffers_frame_id_[kLastBuffer];
- } else {
- if (IsNewerFrameId(used_buffers_frame_id_[kLastBuffer],
- latest_frame_id_to_reference)) {
- latest_frame_id_to_reference = used_buffers_frame_id_[kLastBuffer];
+
+ // Sorting three elements with selection sort.
+ for (int i = 0; i < kNumberOfVp8VideoBuffers - 1; i++) {
+ for (int j = i + 1; j < kNumberOfVp8VideoBuffers; j++) {
+ if (IsOlderFrameId(buffer_state_[buffers[j]].frame_id,
+ buffer_state_[buffers[i]].frame_id)) {
+ std::swap(buffers[i], buffers[j]);
}
}
}
- DCHECK(latest_frame_id_to_reference != -1) << "Invalid state";
- return static_cast<uint32>(latest_frame_id_to_reference);
-}
-Vp8Encoder::Vp8Buffers Vp8Encoder::GetNextBufferToUpdate() {
- if (!use_multiple_video_buffers_)
- return kNoBuffer;
+ // Rule 2
+ if (buffer_state_[buffers[1]].state == kBufferAcked ||
+ buffer_state_[buffers[2]].state == kBufferAcked) {
+ undroppable_frames_ = 0;
+ return buffers[0];
+ }
- // Update at most one buffer, except for key-frames.
+ // Rule 3
+ for (int i = 0; i < kNumberOfVp8VideoBuffers; i++) {
+ if (buffer_state_[buffers[i]].state == kBufferSent &&
+ IsOlderFrameId(buffer_state_[buffers[i]].frame_id,
+ last_acked_frame_id_)) {
+ undroppable_frames_ = 0;
+ return buffers[i];
+ }
+ }
- Vp8Buffers buffer_to_update = kNoBuffer;
- if (number_of_repeated_buffers_ < max_number_of_repeated_buffers_in_a_row_) {
- // TODO(pwestin): experiment with this. The issue with only this change is
- // that we can end up with only 4 frames in flight when we expect 6.
- // buffer_to_update = last_used_vp8_buffer_;
- buffer_to_update = kNoBuffer;
- ++number_of_repeated_buffers_;
+ // Rule 4
+ if (undroppable_frames_ >= 3) {
+ undroppable_frames_ = 0;
+ return kNoBuffer;
} else {
- number_of_repeated_buffers_ = 0;
- switch (last_used_vp8_buffer_) {
- case kAltRefBuffer:
- buffer_to_update = kLastBuffer;
- VLOG(1) << "VP8 update last buffer";
- break;
- case kLastBuffer:
- buffer_to_update = kGoldenBuffer;
- VLOG(1) << "VP8 update golden buffer";
- break;
- case kGoldenBuffer:
- buffer_to_update = kAltRefBuffer;
- VLOG(1) << "VP8 update alt-ref buffer";
- break;
- case kNoBuffer:
- DCHECK(false) << "Invalid state";
- break;
- }
+ undroppable_frames_++;
+ return buffers[kNumberOfVp8VideoBuffers - 1];
}
- return buffer_to_update;
}
void Vp8Encoder::GetCodecUpdateFlags(Vp8Buffers buffer_to_update,
@@ -381,10 +389,14 @@ void Vp8Encoder::LatestFrameIdToReference(uint32 frame_id) {
VLOG(1) << "VP8 ok to reference frame:" << static_cast<int>(frame_id);
for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
- if (frame_id == used_buffers_frame_id_[i]) {
- acked_frame_buffers_[i] = true;
+ if (frame_id == buffer_state_[i].frame_id) {
+ buffer_state_[i].state = kBufferAcked;
+ break;
}
}
+ if (IsOlderFrameId(last_acked_frame_id_, frame_id)) {
+ last_acked_frame_id_ = frame_id;
+ }
}
void Vp8Encoder::GenerateKeyFrame() {
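
The buffer bookkeeping above leans on IsNewerFrameId()/IsOlderFrameId()
comparisons that stay correct when the 32-bit frame id wraps around, which is
also why GetCodecReferenceFlags() seeds latest_frame_to_reference with an id
kMagicFrameOffset frames in the past. A small self-contained sketch of a
wrap-safe comparison of this kind (an illustration, not necessarily the exact
helpers the cast code uses):

#include <cassert>
#include <cstdint>

// Wrap-around-safe comparison of 32-bit frame ids: |a| is "newer" than |b|
// when the forward distance from b to a is less than half the id space.
bool IsNewerFrameId(uint32_t a, uint32_t b) {
  return a != b && static_cast<uint32_t>(a - b) < 0x80000000u;
}

bool IsOlderFrameId(uint32_t a, uint32_t b) {
  return a != b && !IsNewerFrameId(a, b);
}

int main() {
  assert(IsNewerFrameId(5u, 3u));
  // The comparison still holds across the 2^32 wrap: id 2 comes "after"
  // id 0xFFFFFFFE.
  assert(IsNewerFrameId(2u, 0xFFFFFFFEu));
  assert(IsOlderFrameId(0xFFFFFFFEu, 2u));
  return 0;
}
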
diff --git a/media/cast/sender/vp8_encoder.h b/media/cast/sender/vp8_encoder.h
index 0437dbc2f5..abe5eebb88 100644
--- a/media/cast/sender/vp8_encoder.h
+++ b/media/cast/sender/vp8_encoder.h
@@ -55,6 +55,16 @@ class Vp8Encoder : public SoftwareVideoEncoder {
kNoBuffer = 3 // Note: must be last.
};
+ enum Vp8BufferState {
+ kBufferStartState,
+ kBufferSent,
+ kBufferAcked
+ };
+ struct BufferState {
+ uint32 frame_id;
+ Vp8BufferState state;
+ };
+
void InitEncode(int number_of_cores);
// Calculate the max target in % for a keyframe.
@@ -63,11 +73,9 @@ class Vp8Encoder : public SoftwareVideoEncoder {
// Calculate which next Vp8 buffers to update with the next frame.
Vp8Buffers GetNextBufferToUpdate();
- // Calculate which previous frame to reference.
- uint32 GetLatestFrameIdToReference();
-
// Get encoder flags for our referenced encoder buffers.
- void GetCodecReferenceFlags(vpx_codec_flags_t* flags);
+ // Return which previous frame to reference.
+ uint32 GetCodecReferenceFlags(vpx_codec_flags_t* flags);
// Get encoder flags for our encoder buffers to update with next frame.
void GetCodecUpdateFlags(Vp8Buffers buffer_to_update,
@@ -75,7 +83,6 @@ class Vp8Encoder : public SoftwareVideoEncoder {
const VideoSenderConfig cast_config_;
const bool use_multiple_video_buffers_;
- const int max_number_of_repeated_buffers_in_a_row_;
// VP8 internal objects.
scoped_ptr<vpx_codec_enc_cfg_t> config_;
@@ -86,10 +93,10 @@ class Vp8Encoder : public SoftwareVideoEncoder {
bool first_frame_received_;
base::TimeDelta first_frame_timestamp_;
uint32 last_encoded_frame_id_;
- uint32 used_buffers_frame_id_[kNumberOfVp8VideoBuffers];
- bool acked_frame_buffers_[kNumberOfVp8VideoBuffers];
- Vp8Buffers last_used_vp8_buffer_;
- int number_of_repeated_buffers_;
+ uint32 last_acked_frame_id_;
+ uint32 frame_id_to_reference_;
+ uint32 undroppable_frames_;
+ BufferState buffer_state_[kNumberOfVp8VideoBuffers];
// This is bound to the thread where Initialize() is called.
base::ThreadChecker thread_checker_;
diff --git a/media/cast/test/cast_benchmarks.cc b/media/cast/test/cast_benchmarks.cc
index 07b4de5382..880f4ada8d 100644
--- a/media/cast/test/cast_benchmarks.cc
+++ b/media/cast/test/cast_benchmarks.cc
@@ -112,6 +112,7 @@ class CastTransportSenderWrapper : public CastTransportSender {
const CastTransportRtpConfig& config,
const RtcpCastMessageCallback& cast_message_cb,
const RtcpRttCallback& rtt_cb) OVERRIDE {
+ audio_ssrc_ = config.ssrc;
transport_->InitializeAudio(config, cast_message_cb, rtt_cb);
}
@@ -119,19 +120,18 @@ class CastTransportSenderWrapper : public CastTransportSender {
const CastTransportRtpConfig& config,
const RtcpCastMessageCallback& cast_message_cb,
const RtcpRttCallback& rtt_cb) OVERRIDE {
+ video_ssrc_ = config.ssrc;
transport_->InitializeVideo(config, cast_message_cb, rtt_cb);
}
- virtual void InsertCodedAudioFrame(
- const EncodedFrame& audio_frame) OVERRIDE {
- *encoded_audio_bytes_ += audio_frame.data.size();
- transport_->InsertCodedAudioFrame(audio_frame);
- }
-
- virtual void InsertCodedVideoFrame(
- const EncodedFrame& video_frame) OVERRIDE {
- *encoded_video_bytes_ += video_frame.data.size();
- transport_->InsertCodedVideoFrame(video_frame);
+ virtual void InsertFrame(uint32 ssrc,
+ const EncodedFrame& frame) OVERRIDE {
+ if (ssrc == audio_ssrc_) {
+ *encoded_audio_bytes_ += frame.data.size();
+ } else if (ssrc == video_ssrc_) {
+ *encoded_video_bytes_ += frame.data.size();
+ }
+ transport_->InsertFrame(ssrc, frame);
}
virtual void SendSenderReport(
@@ -160,6 +160,7 @@ class CastTransportSenderWrapper : public CastTransportSender {
private:
scoped_ptr<CastTransportSender> transport_;
+ uint32 audio_ssrc_, video_ssrc_;
uint64* encoded_video_bytes_;
uint64* encoded_audio_bytes_;
};
@@ -224,7 +225,7 @@ class RunOneBenchmark {
int max_number_of_video_buffers_used) {
audio_sender_config_.ssrc = 1;
audio_sender_config_.incoming_feedback_ssrc = 2;
- audio_sender_config_.target_playout_delay =
+ audio_sender_config_.max_playout_delay =
base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs);
audio_sender_config_.rtp_payload_type = 96;
audio_sender_config_.use_external_encoder = false;
@@ -246,7 +247,7 @@ class RunOneBenchmark {
video_sender_config_.ssrc = 3;
video_sender_config_.incoming_feedback_ssrc = 4;
- video_sender_config_.target_playout_delay =
+ video_sender_config_.max_playout_delay =
base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs);
video_sender_config_.rtp_payload_type = 97;
video_sender_config_.use_external_encoder = false;
@@ -301,6 +302,7 @@ class RunOneBenchmark {
NULL,
testing_clock_sender_,
dummy_endpoint,
+ make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
base::Bind(&IgnoreRawEvents),
base::TimeDelta::FromSeconds(1),
diff --git a/media/cast/test/end2end_unittest.cc b/media/cast/test/end2end_unittest.cc
index bb8b62d791..a80912a900 100644
--- a/media/cast/test/end2end_unittest.cc
+++ b/media/cast/test/end2end_unittest.cc
@@ -210,7 +210,7 @@ class LoopBackTransport : public PacketSender {
const base::Closure& cb) OVERRIDE {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
if (!send_packets_)
- return false;
+ return true;
bytes_sent_ += packet->data.size();
if (drop_packets_belonging_to_odd_frames_) {
@@ -390,7 +390,7 @@ class TestReceiverVideoCallback
bool is_continuous) {
++num_called_;
- ASSERT_TRUE(!!video_frame);
+ ASSERT_TRUE(!!video_frame.get());
ASSERT_FALSE(expected_frame_.empty());
ExpectedVideoFrame expected_video_frame = expected_frame_.front();
expected_frame_.pop_front();
@@ -403,9 +403,12 @@ class TestReceiverVideoCallback
scoped_refptr<media::VideoFrame> expected_I420_frame =
media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta());
- PopulateVideoFrame(expected_I420_frame, expected_video_frame.start_value);
+ PopulateVideoFrame(expected_I420_frame.get(),
+ expected_video_frame.start_value);
- EXPECT_GE(I420PSNR(expected_I420_frame, video_frame), kVideoAcceptedPSNR);
+ if (expected_video_frame.should_be_continuous) {
+ EXPECT_GE(I420PSNR(expected_I420_frame, video_frame), kVideoAcceptedPSNR);
+ }
EXPECT_NEAR(
(playout_time - expected_video_frame.playout_time).InMillisecondsF(),
@@ -471,7 +474,7 @@ class End2EndTest : public ::testing::Test {
int max_number_of_video_buffers_used) {
audio_sender_config_.ssrc = 1;
audio_sender_config_.incoming_feedback_ssrc = 2;
- audio_sender_config_.target_playout_delay =
+ audio_sender_config_.max_playout_delay =
base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs);
audio_sender_config_.rtp_payload_type = 96;
audio_sender_config_.use_external_encoder = false;
@@ -496,7 +499,7 @@ class End2EndTest : public ::testing::Test {
video_sender_config_.ssrc = 3;
video_sender_config_.incoming_feedback_ssrc = 4;
- video_sender_config_.target_playout_delay =
+ video_sender_config_.max_playout_delay =
base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs);
video_sender_config_.rtp_payload_type = 97;
video_sender_config_.use_external_encoder = false;
@@ -593,6 +596,7 @@ class End2EndTest : public ::testing::Test {
NULL,
testing_clock_sender_,
dummy_endpoint,
+ make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
base::Bind(&End2EndTest::LogRawEvents, base::Unretained(this)),
base::TimeDelta::FromMilliseconds(1),
@@ -609,6 +613,7 @@ class End2EndTest : public ::testing::Test {
base::Bind(&VideoInitializationStatus),
CreateDefaultVideoEncodeAcceleratorCallback(),
CreateDefaultVideoEncodeMemoryCallback());
+ task_runner_->RunTasks();
receiver_to_sender_.SetPacketReceiver(
transport_sender_->PacketReceiverForTesting(),
@@ -649,7 +654,7 @@ class End2EndTest : public ::testing::Test {
scoped_refptr<media::VideoFrame> video_frame =
media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, time_diff);
- PopulateVideoFrame(video_frame, start_value);
+ PopulateVideoFrame(video_frame.get(), start_value);
video_frame_input_->InsertRawVideoFrame(video_frame, capture_time);
}
@@ -987,79 +992,14 @@ TEST_F(End2EndTest, DISABLED_StartSenderBeforeReceiver) {
EXPECT_EQ(10, test_receiver_video_callback_->number_times_called());
}
-// This tests a network glitch lasting for 10 video frames.
-// Flaky. See crbug.com/351596.
-TEST_F(End2EndTest, DISABLED_GlitchWith3Buffers) {
- Configure(CODEC_VIDEO_VP8, CODEC_AUDIO_OPUS,
- kDefaultAudioSamplingRate, 3);
- video_sender_config_.target_playout_delay =
- base::TimeDelta::FromMilliseconds(67);
- video_receiver_config_.rtp_max_delay_ms = 67;
- Create();
-
- int video_start = kVideoStart;
- base::TimeTicks capture_time;
- // Frames will rendered on completion until the render time stabilizes, i.e.
- // we got enough data.
- const int frames_before_glitch = 20;
- for (int i = 0; i < frames_before_glitch; ++i) {
- capture_time = testing_clock_sender_->NowTicks();
- SendVideoFrame(video_start, capture_time);
- test_receiver_video_callback_->AddExpectedResult(
- video_start,
- video_sender_config_.width,
- video_sender_config_.height,
- capture_time + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs),
- true);
- cast_receiver_->RequestDecodedVideoFrame(
- base::Bind(&TestReceiverVideoCallback::CheckVideoFrame,
- test_receiver_video_callback_));
- RunTasks(kFrameTimerMs);
- video_start++;
- }
-
- // Introduce a glitch lasting for 10 frames.
- sender_to_receiver_.SetSendPackets(false);
- for (int i = 0; i < 10; ++i) {
- capture_time = testing_clock_sender_->NowTicks();
- // First 3 will be sent and lost.
- SendVideoFrame(video_start, capture_time);
- RunTasks(kFrameTimerMs);
- video_start++;
- }
- sender_to_receiver_.SetSendPackets(true);
- RunTasks(100);
- capture_time = testing_clock_sender_->NowTicks();
-
- // Frame 1 should be acked by now and we should have an opening to send 4.
- SendVideoFrame(video_start, capture_time);
- RunTasks(kFrameTimerMs);
-
- // Frames 1-3 are old frames by now, and therefore should be decoded, but
- // not rendered. The next frame we expect to render is frame #4.
- test_receiver_video_callback_->AddExpectedResult(
- video_start,
- video_sender_config_.width,
- video_sender_config_.height,
- capture_time + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs),
- true);
-
- cast_receiver_->RequestDecodedVideoFrame(
- base::Bind(&TestReceiverVideoCallback::CheckVideoFrame,
- test_receiver_video_callback_));
-
- RunTasks(2 * kFrameTimerMs + 1); // Empty the receiver pipeline.
- EXPECT_EQ(frames_before_glitch + 1,
- test_receiver_video_callback_->number_times_called());
-}
-
-// Disabled due to flakiness and crashiness. http://crbug.com/360951
-TEST_F(End2EndTest, DISABLED_DropEveryOtherFrame3Buffers) {
- Configure(CODEC_VIDEO_VP8, CODEC_AUDIO_OPUS,
- kDefaultAudioSamplingRate, 3);
- video_sender_config_.target_playout_delay =
- base::TimeDelta::FromMilliseconds(67);
- video_receiver_config_.rtp_max_delay_ms = 67;
+TEST_F(End2EndTest, DropEveryOtherFrame3Buffers) {
+ Configure(CODEC_VIDEO_VP8, CODEC_AUDIO_OPUS, kDefaultAudioSamplingRate, 3);
+ int target_delay = 300;
+ video_sender_config_.max_playout_delay =
+ base::TimeDelta::FromMilliseconds(target_delay);
+ audio_sender_config_.max_playout_delay =
+ base::TimeDelta::FromMilliseconds(target_delay);
+ video_receiver_config_.rtp_max_delay_ms = target_delay;
Create();
sender_to_receiver_.DropAllPacketsBelongingToOddFrames();
@@ -1077,7 +1017,7 @@ TEST_F(End2EndTest, DISABLED_DropEveryOtherFrame3Buffers) {
video_sender_config_.width,
video_sender_config_.height,
capture_time +
- base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs),
+ base::TimeDelta::FromMilliseconds(target_delay),
i == 0);
// GetRawVideoFrame will not return the frame until we are close in
@@ -1090,7 +1030,7 @@ TEST_F(End2EndTest, DISABLED_DropEveryOtherFrame3Buffers) {
video_start++;
}
- RunTasks(2 * kFrameTimerMs + 1); // Empty the pipeline.
+ RunTasks(2 * kFrameTimerMs + target_delay); // Empty the pipeline.
EXPECT_EQ(i / 2, test_receiver_video_callback_->number_times_called());
}
@@ -1458,6 +1398,32 @@ TEST_F(End2EndTest, EvilNetwork) {
base::TimeTicks test_end = testing_clock_receiver_->NowTicks();
RunTasks(100 * kFrameTimerMs + 1); // Empty the pipeline.
EXPECT_GT(video_ticks_.size(), 100ul);
+ VLOG(1) << "Fully transmitted " << video_ticks_.size()
+ << " out of 10000 frames.";
+ EXPECT_LT((video_ticks_.back().second - test_end).InMilliseconds(), 1000);
+}
+
+// Tests that a system configured for 30 FPS drops frames when input is provided
+// at a much higher frame rate.
+TEST_F(End2EndTest, ShoveHighFrameRateDownYerThroat) {
+ Configure(CODEC_VIDEO_FAKE, CODEC_AUDIO_PCM16, 32000,
+ 1);
+ receiver_to_sender_.SetPacketPipe(test::EvilNetwork().Pass());
+ sender_to_receiver_.SetPacketPipe(test::EvilNetwork().Pass());
+ Create();
+ StartBasicPlayer();
+
+ int frames_counter = 0;
+ for (; frames_counter < 10000; ++frames_counter) {
+ SendFakeVideoFrame(testing_clock_sender_->NowTicks());
+ RunTasks(10 /* 10 ms, but 33.3 expected by system */);
+ }
+ base::TimeTicks test_end = testing_clock_receiver_->NowTicks();
+ RunTasks(100 * kFrameTimerMs + 1); // Empty the pipeline.
+ EXPECT_LT(100ul, video_ticks_.size());
+ EXPECT_GE(3334ul, video_ticks_.size());
+ VLOG(1) << "Fully transmitted " << video_ticks_.size()
+ << " out of 10000 frames.";
EXPECT_LT((video_ticks_.back().second - test_end).InMilliseconds(), 1000);
}
@@ -1496,6 +1462,12 @@ TEST_F(End2EndTest, OldPacketNetwork) {
TEST_F(End2EndTest, TestSetPlayoutDelay) {
Configure(CODEC_VIDEO_FAKE, CODEC_AUDIO_PCM16, 32000, 1);
+ video_sender_config_.min_playout_delay =
+ video_sender_config_.max_playout_delay;
+ audio_sender_config_.min_playout_delay =
+ audio_sender_config_.max_playout_delay;
+ video_sender_config_.max_playout_delay = base::TimeDelta::FromSeconds(1);
+ audio_sender_config_.max_playout_delay = base::TimeDelta::FromSeconds(1);
Create();
StartBasicPlayer();
const int kNewDelay = 600;
diff --git a/media/cast/test/fake_media_source.cc b/media/cast/test/fake_media_source.cc
index f3ebd9cbe1..73b249336a 100644
--- a/media/cast/test/fake_media_source.cc
+++ b/media/cast/test/fake_media_source.cc
@@ -147,7 +147,6 @@ void FakeMediaSource::SetSourceFile(const base::FilePath& video_file,
AudioParameters::AUDIO_PCM_LINEAR,
layout,
av_codec_context->channels,
- av_codec_context->channels,
av_codec_context->sample_rate,
8 * av_get_bytes_per_sample(av_codec_context->sample_fmt),
av_codec_context->sample_rate / kAudioPacketsPerSecond);
@@ -233,7 +232,7 @@ void FakeMediaSource::SendNextFakeFrame() {
gfx::Size size(video_config_.width, video_config_.height);
scoped_refptr<VideoFrame> video_frame =
VideoFrame::CreateBlackFrame(size);
- PopulateVideoFrame(video_frame, synthetic_count_);
+ PopulateVideoFrame(video_frame.get(), synthetic_count_);
++synthetic_count_;
base::TimeTicks now = clock_->NowTicks();
@@ -302,17 +301,17 @@ bool FakeMediaSource::SendNextTranscodedVideo(base::TimeDelta elapsed_time) {
decoded_frame->data(VideoFrame::kYPlane),
decoded_frame->stride(VideoFrame::kYPlane),
decoded_frame->rows(VideoFrame::kYPlane),
- video_frame);
+ video_frame.get());
media::CopyPlane(VideoFrame::kUPlane,
decoded_frame->data(VideoFrame::kUPlane),
decoded_frame->stride(VideoFrame::kUPlane),
decoded_frame->rows(VideoFrame::kUPlane),
- video_frame);
+ video_frame.get());
media::CopyPlane(VideoFrame::kVPlane,
decoded_frame->data(VideoFrame::kVPlane),
decoded_frame->stride(VideoFrame::kVPlane),
decoded_frame->rows(VideoFrame::kVPlane),
- video_frame);
+ video_frame.get());
base::TimeDelta video_time;
// Use the timestamp from the file if we're transcoding.
diff --git a/media/cast/test/fake_video_encode_accelerator.cc b/media/cast/test/fake_video_encode_accelerator.cc
index 0442c0c928..23a6fb315e 100644
--- a/media/cast/test/fake_video_encode_accelerator.cc
+++ b/media/cast/test/fake_video_encode_accelerator.cc
@@ -23,6 +23,7 @@ FakeVideoEncodeAccelerator::FakeVideoEncodeAccelerator(
stored_bitrates_(stored_bitrates),
client_(NULL),
first_(true),
+ will_initialization_succeed_(true),
weak_this_factory_(this) {
DCHECK(stored_bitrates_);
}
@@ -31,12 +32,19 @@ FakeVideoEncodeAccelerator::~FakeVideoEncodeAccelerator() {
weak_this_factory_.InvalidateWeakPtrs();
}
+std::vector<VideoEncodeAccelerator::SupportedProfile>
+FakeVideoEncodeAccelerator::GetSupportedProfiles() {
+ return std::vector<VideoEncodeAccelerator::SupportedProfile>();
+}
+
bool FakeVideoEncodeAccelerator::Initialize(
media::VideoFrame::Format input_format,
const gfx::Size& input_visible_size,
VideoCodecProfile output_profile,
uint32 initial_bitrate,
Client* client) {
+ if (!will_initialization_succeed_)
+ return false;
client_ = client;
if (output_profile != media::VP8PROFILE_ANY &&
output_profile != media::H264PROFILE_MAIN) {
diff --git a/media/cast/test/fake_video_encode_accelerator.h b/media/cast/test/fake_video_encode_accelerator.h
index 30e772b74e..740b8aa9a7 100644
--- a/media/cast/test/fake_video_encode_accelerator.h
+++ b/media/cast/test/fake_video_encode_accelerator.h
@@ -28,6 +28,8 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
std::vector<uint32>* stored_bitrates);
virtual ~FakeVideoEncodeAccelerator();
+ virtual std::vector<VideoEncodeAccelerator::SupportedProfile>
+ GetSupportedProfiles() OVERRIDE;
virtual bool Initialize(media::VideoFrame::Format input_format,
const gfx::Size& input_visible_size,
VideoCodecProfile output_profile,
@@ -45,6 +47,9 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
virtual void Destroy() OVERRIDE;
void SendDummyFrameForTesting(bool key_frame);
+ void SetWillInitializationSucceed(bool will_initialization_succeed) {
+ will_initialization_succeed_ = will_initialization_succeed;
+ }
private:
void DoRequireBitstreamBuffers(unsigned int input_count,
@@ -58,6 +63,7 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
std::vector<uint32>* const stored_bitrates_;
VideoEncodeAccelerator::Client* client_;
bool first_;
+ bool will_initialization_succeed_;
std::list<int32> available_buffer_ids_;
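
A rough sketch, not part of this patch, of how a test might use the new SetWillInitializationSucceed() hook to force the fake hardware encoder down its initialization-failure path. The namespace, include paths, frame size, and bitrate are assumptions; only SetWillInitializationSucceed() and the Initialize() signature come from the change above.

#include "base/logging.h"
#include "media/cast/test/fake_video_encode_accelerator.h"
#include "ui/gfx/size.h"  // assumed include path at this revision

// Sketch: make Initialize() report failure so callers exercise their
// software-encoder fallback.
void ForceEncoderInitFailure(media::cast::test::FakeVideoEncodeAccelerator* vea,
                             media::VideoEncodeAccelerator::Client* client) {
  vea->SetWillInitializationSucceed(false);
  const bool ok = vea->Initialize(media::VideoFrame::I420,
                                  gfx::Size(320, 240),  // arbitrary size
                                  media::VP8PROFILE_ANY,
                                  300000,               // arbitrary bitrate, bps
                                  client);
  CHECK(!ok);  // Initialize() now bails out before touching |client|.
}
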
diff --git a/media/cast/test/proto/BUILD.gn b/media/cast/test/proto/BUILD.gn
index 85c19cfe2f..78b82e3aeb 100644
--- a/media/cast/test/proto/BUILD.gn
+++ b/media/cast/test/proto/BUILD.gn
@@ -6,19 +6,16 @@ import("//third_party/protobuf/proto_library.gni")
# GYP version: media/cast/cast.gyp:cast_logging_proto
source_set("proto") {
- deps = [
+ public_deps = [
":cast_network_simulation_proto",
]
- direct_dependent_configs = [
+ public_configs = [
"//third_party/protobuf:protobuf_config"
]
- forward_dependent_configs_from = [
- ":cast_network_simulation_proto",
- ]
}
proto_library("cast_network_simulation_proto") {
- visibility = ":proto"
+ visibility = [ ":proto" ]
sources = [
"network_simulation_model.proto",
]
diff --git a/media/cast/test/sender.cc b/media/cast/test/sender.cc
index 40ca369329..38aef308fe 100644
--- a/media/cast/test/sender.cc
+++ b/media/cast/test/sender.cc
@@ -75,7 +75,7 @@ media::cast::AudioSenderConfig GetAudioSenderConfig() {
audio_config.rtp_payload_type = 127;
// TODO(miu): The default in cast_defines.h is 100. Should this be 100, and
// should receiver.cc's config also be 100?
- audio_config.target_playout_delay = base::TimeDelta::FromMilliseconds(300);
+ audio_config.max_playout_delay = base::TimeDelta::FromMilliseconds(300);
return audio_config;
}
@@ -109,7 +109,7 @@ media::cast::VideoSenderConfig GetVideoSenderConfig() {
video_config.rtp_payload_type = 96;
// TODO(miu): The default in cast_defines.h is 100. Should this be 100, and
// should receiver.cc's config also be 100?
- video_config.target_playout_delay = base::TimeDelta::FromMilliseconds(300);
+ video_config.max_playout_delay = base::TimeDelta::FromMilliseconds(300);
return video_config;
}
@@ -322,6 +322,7 @@ int main(int argc, char** argv) {
NULL, // net log.
cast_environment->Clock(),
remote_endpoint,
+ make_scoped_ptr(new base::DictionaryValue), // options
base::Bind(&UpdateCastTransportStatus),
base::Bind(&LogRawEvents, cast_environment),
base::TimeDelta::FromSeconds(1),
@@ -355,7 +356,7 @@ int main(int argc, char** argv) {
// Subscribers for stats.
scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> offset_estimator(
- new media::cast::ReceiverTimeOffsetEstimatorImpl);
+ new media::cast::ReceiverTimeOffsetEstimatorImpl());
cast_environment->Logging()->AddRawEventSubscriber(offset_estimator.get());
scoped_ptr<media::cast::StatsEventSubscriber> video_stats_subscriber(
new media::cast::StatsEventSubscriber(media::cast::VIDEO_EVENT,
diff --git a/media/cast/test/simulator.cc b/media/cast/test/simulator.cc
index 39ff725a86..e3872a98d9 100644
--- a/media/cast/test/simulator.cc
+++ b/media/cast/test/simulator.cc
@@ -11,6 +11,9 @@
// File path to writing out the raw event log of the simulation session.
// --sim-id=
// Unique simulation ID.
+// --target-delay-ms=
+// Target playout delay to configure (integer number of milliseconds).
+// Optional; default is 400.
//
// Output:
// - Raw event log of the simulation session tagged with the unique test ID,
@@ -19,13 +22,14 @@
#include "base/at_exit.h"
#include "base/base_paths.h"
#include "base/command_line.h"
-#include "base/file_util.h"
#include "base/files/file_path.h"
+#include "base/files/file_util.h"
#include "base/files/memory_mapped_file.h"
#include "base/files/scoped_file.h"
#include "base/json/json_writer.h"
#include "base/logging.h"
#include "base/path_service.h"
+#include "base/strings/string_number_conversions.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/thread_task_runner_handle.h"
#include "base/time/tick_clock.h"
@@ -65,12 +69,23 @@ using media::cast::proto::NetworkSimulationModelType;
namespace media {
namespace cast {
namespace {
-const int kTargetDelay = 400;
const char kSourcePath[] = "source";
const char kModelPath[] = "model";
const char kOutputPath[] = "output";
const char kSimulationId[] = "sim-id";
const char kLibDir[] = "lib-dir";
+const char kTargetDelay[] = "target-delay-ms";
+
+base::TimeDelta GetTargetPlayoutDelay() {
+ const std::string delay_str =
+ CommandLine::ForCurrentProcess()->GetSwitchValueASCII(kTargetDelay);
+ if (delay_str.empty())
+ return base::TimeDelta::FromMilliseconds(400);
+ int delay_ms;
+ CHECK(base::StringToInt(delay_str, &delay_ms));
+ CHECK_GT(delay_ms, 0);
+ return base::TimeDelta::FromMilliseconds(delay_ms);
+}
void UpdateCastTransportStatus(CastTransportStatus status) {
LOG(INFO) << "Cast transport status: " << status;
@@ -219,28 +234,26 @@ void RunSimulation(const base::FilePath& source_path,
// Audio sender config.
AudioSenderConfig audio_sender_config = GetDefaultAudioSenderConfig();
- audio_sender_config.target_playout_delay =
- base::TimeDelta::FromMilliseconds(kTargetDelay);
+ audio_sender_config.max_playout_delay = GetTargetPlayoutDelay();
// Audio receiver config.
FrameReceiverConfig audio_receiver_config =
GetDefaultAudioReceiverConfig();
audio_receiver_config.rtp_max_delay_ms =
- audio_sender_config.target_playout_delay.InMilliseconds();
+ audio_sender_config.max_playout_delay.InMilliseconds();
// Video sender config.
VideoSenderConfig video_sender_config = GetDefaultVideoSenderConfig();
video_sender_config.max_bitrate = 2500000;
video_sender_config.min_bitrate = 2000000;
video_sender_config.start_bitrate = 2000000;
- video_sender_config.target_playout_delay =
- base::TimeDelta::FromMilliseconds(kTargetDelay);
+ video_sender_config.max_playout_delay = GetTargetPlayoutDelay();
// Video receiver config.
FrameReceiverConfig video_receiver_config =
GetDefaultVideoReceiverConfig();
video_receiver_config.rtp_max_delay_ms =
- video_sender_config.target_playout_delay.InMilliseconds();
+ video_sender_config.max_playout_delay.InMilliseconds();
// Loopback transport.
LoopBackTransport receiver_to_sender(receiver_env);
@@ -259,6 +272,7 @@ void RunSimulation(const base::FilePath& source_path,
NULL,
&testing_clock,
net::IPEndPoint(),
+ make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
base::Bind(&LogTransportEvents, sender_env),
base::TimeDelta::FromSeconds(1),
@@ -310,6 +324,7 @@ void RunSimulation(const base::FilePath& source_path,
base::Bind(&VideoInitializationStatus),
CreateDefaultVideoEncodeAcceleratorCallback(),
CreateDefaultVideoEncodeMemoryCallback());
+ task_runner->RunTasks();
// Start sending.
if (!source_path.empty()) {
@@ -353,6 +368,7 @@ void RunSimulation(const base::FilePath& source_path,
int encoded_video_frames = 0;
int dropped_video_frames = 0;
int late_video_frames = 0;
+ int64 total_delay_of_late_frames_ms = 0;
int64 encoded_size = 0;
int64 target_bitrate = 0;
for (size_t i = 0; i < video_frame_events.size(); ++i) {
@@ -366,8 +382,10 @@ void RunSimulation(const base::FilePath& source_path,
} else {
++dropped_video_frames;
}
- if (event.has_delay_millis() && event.delay_millis() < 0)
+ if (event.has_delay_millis() && event.delay_millis() < 0) {
++late_video_frames;
+ total_delay_of_late_frames_ms += -event.delay_millis();
+ }
}
double avg_encoded_bitrate =
@@ -377,10 +395,18 @@ void RunSimulation(const base::FilePath& source_path,
double avg_target_bitrate =
!encoded_video_frames ? 0 : target_bitrate / encoded_video_frames / 1000;
+ LOG(INFO) << "Configured target playout delay (ms): "
+ << video_receiver_config.rtp_max_delay_ms;
LOG(INFO) << "Audio frame count: " << audio_frame_count;
LOG(INFO) << "Total video frames: " << total_video_frames;
LOG(INFO) << "Dropped video frames " << dropped_video_frames;
- LOG(INFO) << "Late video frames: " << late_video_frames;
+ LOG(INFO) << "Late video frames: " << late_video_frames
+ << " (average lateness: "
+ << (late_video_frames > 0 ?
+ static_cast<double>(total_delay_of_late_frames_ms) /
+ late_video_frames :
+ 0)
+ << " ms)";
LOG(INFO) << "Average encoded bitrate (kbps): " << avg_encoded_bitrate;
LOG(INFO) << "Average target bitrate (kbps): " << avg_target_bitrate;
LOG(INFO) << "Writing log: " << output_path.value();
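
A possible invocation of the simulator's new --target-delay-ms switch (documented in the file's header comment above and parsed by GetTargetPlayoutDelay(), which defaults to 400 ms and CHECK-fails on non-positive values). The binary name and file paths below are placeholders; only the switch names come from this file.

  cast_simulator --source=clip.webm --model=network.model --output=/tmp/sim-events.log --sim-id=test1 --target-delay-ms=600
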
diff --git a/media/cast/test/utility/default_config.cc b/media/cast/test/utility/default_config.cc
index ce81ebab29..2496e71fb8 100644
--- a/media/cast/test/utility/default_config.cc
+++ b/media/cast/test/utility/default_config.cc
@@ -62,7 +62,7 @@ AudioSenderConfig GetDefaultAudioSenderConfig() {
config.channels = recv_config.channels;
config.bitrate = kDefaultAudioEncoderBitrate;
config.codec = recv_config.codec;
- config.target_playout_delay =
+ config.max_playout_delay =
base::TimeDelta::FromMilliseconds(kDefaultRtpMaxDelayMs);
return config;
}
@@ -83,7 +83,7 @@ VideoSenderConfig GetDefaultVideoSenderConfig() {
config.max_number_of_video_buffers_used = 1;
config.codec = recv_config.codec;
config.number_of_encode_threads = 2;
- config.target_playout_delay =
+ config.max_playout_delay =
base::TimeDelta::FromMilliseconds(kDefaultRtpMaxDelayMs);
return config;
}
diff --git a/media/cast/test/utility/in_process_receiver.cc b/media/cast/test/utility/in_process_receiver.cc
index 6c739ecdb6..c5666d7b96 100644
--- a/media/cast/test/utility/in_process_receiver.cc
+++ b/media/cast/test/utility/in_process_receiver.cc
@@ -107,7 +107,7 @@ void InProcessReceiver::GotVideoFrame(
const base::TimeTicks& playout_time,
bool is_continuous) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
- if (video_frame)
+ if (video_frame.get())
OnVideoFrame(video_frame, playout_time, is_continuous);
PullNextVideoFrame();
}
diff --git a/media/cast/test/utility/udp_proxy.cc b/media/cast/test/utility/udp_proxy.cc
index 4dbac83fa5..95640a364e 100644
--- a/media/cast/test/utility/udp_proxy.cc
+++ b/media/cast/test/utility/udp_proxy.cc
@@ -438,7 +438,7 @@ void InterruptedPoissonProcess::InitOnIOThread(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
base::TickClock* clock) {
// Already initialized and started.
- if (task_runner_ && clock_)
+ if (task_runner_.get() && clock_)
return;
task_runner_ = task_runner;
clock_ = clock;
@@ -692,7 +692,7 @@ class UDPProxyImpl : public UDPProxy {
result = net::ERR_INVALID_ARGUMENT;
} else {
VLOG(1) << "Destination:" << destination.ToString();
- result = socket_->SendTo(buf,
+ result = socket_->SendTo(buf.get(),
static_cast<int>(buf_size),
destination,
base::Bind(&UDPProxyImpl::AllowWrite,
@@ -773,12 +773,11 @@ class UDPProxyImpl : public UDPProxy {
scoped_refptr<net::IOBuffer> recv_buf =
new net::WrappedIOBuffer(reinterpret_cast<char*>(&packet_->front()));
int len = socket_->RecvFrom(
- recv_buf,
+ recv_buf.get(),
kMaxPacketSize,
&recv_address_,
- base::Bind(&UDPProxyImpl::ReadCallback,
- base::Unretained(this),
- recv_buf));
+ base::Bind(
+ &UDPProxyImpl::ReadCallback, base::Unretained(this), recv_buf));
if (len == net::ERR_IO_PENDING)
break;
ProcessPacket(recv_buf, len);