author     Android Chromium Automerger <chromium-automerger@android>  2014-07-09 15:00:07 +0000
committer  Android Chromium Automerger <chromium-automerger@android>  2014-07-09 15:00:07 +0000
commit     10b9861436e47694b29da87b7890287041a95a74 (patch)
tree       64d09be0638cccd20e1ca345ff522372b708ab67
parent     3ded5808c2dec22ec97b3b0c855f1bcb75c1db09 (diff)
parent     138adbb0bcdab60afda25a8727e5a071abc4ae36 (diff)
download   webrtc-10b9861436e47694b29da87b7890287041a95a74.tar.gz
Merge third_party/webrtc from https://chromium.googlesource.com/external/webrtc/trunk/webrtc.git at 138adbb0bcdab60afda25a8727e5a071abc4ae36
This commit was generated by merge_from_chromium.py.
Change-Id: Iffa5413ebfb78de36b84b4e85d94adc093f912df
-rw-r--r--  base/base.gyp | 2
-rw-r--r--  base/timing.cc | 1
-rw-r--r--  base/timing.h | 3
-rw-r--r--  build/tsan_suppressions.cc | 9
-rw-r--r--  build/webrtc.gni | 11
-rw-r--r--  call.h | 4
-rw-r--r--  common_audio/signal_processing/include/signal_processing_library.h | 2
-rw-r--r--  common_audio/signal_processing/signal_processing_unittest.cc | 1
-rw-r--r--  common_types.h | 15
-rw-r--r--  common_video/BUILD.gn | 34
-rw-r--r--  engine_configurations.h | 1
-rw-r--r--  examples/android/media_demo/jni/video_engine_jni.cc | 6
-rw-r--r--  examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java | 10
-rw-r--r--  examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java | 1
-rw-r--r--  experiments.h | 10
-rw-r--r--  modules/audio_device/dummy/file_audio_device_factory.cc | 7
-rw-r--r--  modules/audio_processing/aec/aec_rdft_neon.c | 88
-rw-r--r--  modules/audio_processing/aec/aec_resampler.c | 24
-rw-r--r--  modules/audio_processing/aec/aec_resampler.h | 4
-rw-r--r--  modules/audio_processing/aec/echo_cancellation.c | 40
-rw-r--r--  modules/audio_processing/aec/include/echo_cancellation.h | 4
-rw-r--r--  modules/audio_processing/aec/system_delay_unittest.cc | 7
-rw-r--r--  modules/audio_processing/aecm/aecm_core_c.c | 33
-rw-r--r--  modules/audio_processing/aecm/aecm_core_mips.c | 22
-rw-r--r--  modules/audio_processing/audio_buffer.cc | 21
-rw-r--r--  modules/audio_processing/audio_buffer.h | 3
-rw-r--r--  modules/audio_processing/audio_processing_impl.cc | 4
-rw-r--r--  modules/audio_processing/audio_processing_impl.h | 4
-rw-r--r--  modules/audio_processing/echo_cancellation_impl.cc | 2
-rw-r--r--  modules/audio_processing/include/audio_processing.h | 4
-rw-r--r--  modules/audio_processing/include/mock_audio_processing.h | 4
-rw-r--r--  modules/audio_processing/utility/delay_estimator.c | 160
-rw-r--r--  modules/audio_processing/utility/delay_estimator.h | 29
-rw-r--r--  modules/audio_processing/utility/delay_estimator_unittest.cc | 100
-rw-r--r--  modules/audio_processing/utility/delay_estimator_wrapper.c | 42
-rw-r--r--  modules/audio_processing/utility/delay_estimator_wrapper.h | 85
-rw-r--r--  modules/desktop_capture/BUILD.gn | 6
-rw-r--r--  modules/desktop_capture/OWNERS | 1
-rw-r--r--  modules/desktop_capture/desktop_capture.gypi | 6
-rw-r--r--  modules/desktop_capture/desktop_capture_options.h | 10
-rw-r--r--  modules/desktop_capture/mac/full_screen_chrome_window_detector.cc | 244
-rw-r--r--  modules/desktop_capture/mac/full_screen_chrome_window_detector.h | 69
-rw-r--r--  modules/desktop_capture/mac/osx_version.cc | 54
-rw-r--r--  modules/desktop_capture/mac/window_list_utils.cc | 62
-rw-r--r--  modules/desktop_capture/mac/window_list_utils.h (renamed from modules/desktop_capture/mac/osx_version.h) | 12
-rw-r--r--  modules/desktop_capture/mouse_cursor_monitor_mac.mm | 29
-rw-r--r--  modules/desktop_capture/screen_capturer_mac.mm | 19
-rw-r--r--  modules/desktop_capture/window_capturer_mac.mm | 62
-rw-r--r--  modules/interface/module_common_types.h | 10
-rw-r--r--  modules/pacing/include/mock/mock_paced_sender.h | 3
-rw-r--r--  modules/pacing/include/paced_sender.h | 67
-rw-r--r--  modules/pacing/paced_sender.cc | 115
-rw-r--r--  modules/pacing/paced_sender_unittest.cc | 133
-rw-r--r--  modules/remote_bitrate_estimator/bwe_simulations.cc | 30
-rw-r--r--  modules/remote_bitrate_estimator/test/bwe_test.cc | 6
-rw-r--r--  modules/remote_bitrate_estimator/test/bwe_test_framework.cc | 103
-rw-r--r--  modules/remote_bitrate_estimator/test/bwe_test_framework.h | 44
-rw-r--r--  modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc | 5
-rw-r--r--  modules/rtp_rtcp/interface/rtp_header_parser.h | 5
-rw-r--r--  modules/rtp_rtcp/interface/rtp_payload_registry.h | 25
-rw-r--r--  modules/rtp_rtcp/interface/rtp_rtcp.h | 12
-rw-r--r--  modules/rtp_rtcp/interface/rtp_rtcp_defines.h | 14
-rw-r--r--  modules/rtp_rtcp/mocks/mock_rtp_rtcp.h | 3
-rw-r--r--  modules/rtp_rtcp/source/H264/rtp_sender_h264.cc | 19
-rw-r--r--  modules/rtp_rtcp/source/fec_receiver_impl.cc | 2
-rw-r--r--  modules/rtp_rtcp/source/fec_test_helper.cc | 6
-rw-r--r--  modules/rtp_rtcp/source/forward_error_correction.cc | 39
-rw-r--r--  modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h | 28
-rw-r--r--  modules/rtp_rtcp/source/producer_fec.cc | 2
-rw-r--r--  modules/rtp_rtcp/source/receive_statistics_impl.cc | 11
-rw-r--r--  modules/rtp_rtcp/source/rtcp_packet.cc | 6
-rw-r--r--  modules/rtp_rtcp/source/rtcp_receiver.cc | 2
-rw-r--r--  modules/rtp_rtcp/source/rtcp_sender.cc | 125
-rw-r--r--  modules/rtp_rtcp/source/rtp_fec_unittest.cc | 183
-rw-r--r--  modules/rtp_rtcp/source/rtp_header_parser.cc | 16
-rw-r--r--  modules/rtp_rtcp/source/rtp_payload_registry.cc | 112
-rw-r--r--  modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc | 31
-rw-r--r--  modules/rtp_rtcp/source/rtp_receiver_audio.cc | 4
-rw-r--r--  modules/rtp_rtcp/source/rtp_receiver_audio.h | 2
-rw-r--r--  modules/rtp_rtcp/source/rtp_receiver_impl.cc | 8
-rw-r--r--  modules/rtp_rtcp/source/rtp_receiver_video.cc | 27
-rw-r--r--  modules/rtp_rtcp/source/rtp_rtcp_impl.cc | 52
-rw-r--r--  modules/rtp_rtcp/source/rtp_rtcp_impl.h | 9
-rw-r--r--  modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc | 5
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender.cc | 245
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender.h | 104
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender_audio.cc | 16
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender_audio.h | 13
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender_unittest.cc | 48
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender_video.cc | 12
-rw-r--r--  modules/rtp_rtcp/source/rtp_sender_video.h | 9
-rw-r--r--  modules/rtp_rtcp/source/rtp_utility.cc | 29
-rw-r--r--  modules/rtp_rtcp/source/rtp_utility.h | 13
-rw-r--r--  modules/rtp_rtcp/source/rtp_utility_unittest.cc | 23
-rw-r--r--  modules/rtp_rtcp/test/testAPI/test_api.h | 4
-rw-r--r--  modules/rtp_rtcp/test/testAPI/test_api_video.cc | 8
-rw-r--r--  modules/rtp_rtcp/test/testFec/test_fec.cc | 19
-rw-r--r--  modules/utility/source/rtp_dump_impl.h | 2
-rw-r--r--  modules/video_coding/BUILD.gn | 42
-rw-r--r--  modules/video_coding/main/interface/video_coding_defines.h | 1
-rw-r--r--  modules/video_coding/main/source/codec_database.cc | 35
-rw-r--r--  modules/video_coding/main/source/internal_defines.h | 13
-rw-r--r--  modules/video_coding/main/source/video_coding_impl.h | 25
-rw-r--r--  modules/video_coding/main/source/video_receiver.cc | 28
-rw-r--r--  modules/video_coding/main/test/pcap_file_reader.cc | 3
-rw-r--r--  modules/video_coding/main/test/pcap_file_reader_unittest.cc | 2
-rw-r--r--  modules/video_processing/BUILD.gn | 47
-rw-r--r--  system_wrappers/source/condition_variable_unittest.cc | 7
-rw-r--r--  system_wrappers/source/critical_section_unittest.cc | 9
-rw-r--r--  system_wrappers/source/logging_unittest.cc | 9
-rw-r--r--  system_wrappers/source/system_wrappers_tests.gyp | 1
-rw-r--r--  system_wrappers/source/unittest_utilities.h | 82
-rw-r--r--  system_wrappers/source/unittest_utilities_unittest.cc | 34
-rw-r--r--  test/OWNERS | 3
-rw-r--r--  test/call_test.cc | 14
-rw-r--r--  test/call_test.h | 4
-rw-r--r--  test/encoder_settings.cc | 16
-rw-r--r--  test/rtp_rtcp_observer.h | 4
-rw-r--r--  video/OWNERS | 3
-rw-r--r--  video/bitrate_estimator_tests.cc | 147
-rw-r--r--  video/call.cc | 80
-rw-r--r--  video/call_perf_tests.cc | 2
-rw-r--r--  video/end_to_end_tests.cc | 275
-rw-r--r--  video/full_stack.cc | 4
-rw-r--r--  video/loopback.cc | 19
-rw-r--r--  video/rampup_tests.cc | 889
-rw-r--r--  video/rampup_tests.h | 159
-rw-r--r--  video/video_send_stream.cc | 43
-rw-r--r--  video/video_send_stream.h | 8
-rw-r--r--  video/video_send_stream_tests.cc | 141
-rw-r--r--  video_engine/include/vie_base.h | 41
-rw-r--r--  video_engine/include/vie_rtp_rtcp.h | 15
-rw-r--r--  video_engine/test/auto_test/automated/vie_network_test.cc | 2
-rw-r--r--  video_engine/test/auto_test/source/vie_autotest.cc | 3
-rw-r--r--  video_engine/vie_base_impl.cc | 31
-rw-r--r--  video_engine/vie_base_impl.h | 5
-rw-r--r--  video_engine/vie_channel.cc | 34
-rw-r--r--  video_engine/vie_channel.h | 41
-rw-r--r--  video_engine/vie_codec_impl.cc | 16
-rw-r--r--  video_engine/vie_encoder.cc | 24
-rw-r--r--  video_engine/vie_rtp_rtcp_impl.cc | 24
-rw-r--r--  video_engine/vie_rtp_rtcp_impl.h | 5
-rw-r--r--  video_engine/vie_sender.cc | 1
-rw-r--r--  video_receive_stream.h | 2
-rw-r--r--  voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc | 4
-rw-r--r--  webrtc_examples.gyp | 1
-rw-r--r--  webrtc_tests.gypi | 1
147 files changed, 3713 insertions, 1866 deletions
diff --git a/base/base.gyp b/base/base.gyp
index f05c3251..f9ba4041 100644
--- a/base/base.gyp
+++ b/base/base.gyp
@@ -711,7 +711,7 @@
'scoped_autorelease_pool.mm',
],
}],
- ['OS=="ios" or os_posix==0', {
+ ['OS=="ios"', {
'sources!': [
'openssl.h',
'openssladapter.cc',
diff --git a/base/timing.cc b/base/timing.cc
index aa1fc429..0c5ed5e1 100644
--- a/base/timing.cc
+++ b/base/timing.cc
@@ -46,6 +46,7 @@ Timing::~Timing() {
#endif
}
+// static
double Timing::WallTimeNow() {
#if defined(WEBRTC_POSIX)
struct timeval time;
diff --git a/base/timing.h b/base/timing.h
index 58b17a9f..1dee6076 100644
--- a/base/timing.h
+++ b/base/timing.h
@@ -24,7 +24,8 @@ class Timing {
// WallTimeNow() returns the current wall-clock time in seconds,
// within 10 milliseconds resolution.
- virtual double WallTimeNow();
+ // WallTimeNow is static and does not require a timer_handle_ on Windows.
+ static double WallTimeNow();
// TimerNow() is like WallTimeNow(), but is monotonically
// increasing. It returns seconds in resolution of 10 microseconds
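
Example (not part of the patch): with WallTimeNow() now static, callers no longer need a Timing instance just to read the wall clock. A minimal sketch assuming only the declaration above; the include path and enclosing namespace are assumptions, since they are not shown in this hunk.

#include "webrtc/base/timing.h"  // Assumed include path.

// Hypothetical caller: no Timing object (and hence no timer_handle_ on
// Windows) is needed any more. Namespace qualification omitted.
double StartTimeSeconds() {
  return Timing::WallTimeNow();  // Wall-clock time in seconds.
}
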
diff --git a/build/tsan_suppressions.cc b/build/tsan_suppressions.cc
index 4002d3c2..02e13af2 100644
--- a/build/tsan_suppressions.cc
+++ b/build/tsan_suppressions.cc
@@ -23,9 +23,11 @@ char kTSanDefaultSuppressions[] =
// WebRTC specific suppressions.
-// False positive in system wrappers.
-// https://code.google.com/p/webrtc/issues/detail?id=300
-"race:webrtc/system_wrappers/source/thread_posix.cc\n"
+// Usage of trace callback and trace level is racy in libjingle_media_unittests.
+// https://code.google.com/p/webrtc/issues/detail?id=3372
+"race:webrtc::TraceImpl::WriteToFile\n"
+"race:webrtc::VideoEngine::SetTraceFilter\n"
+"race:webrtc::VoiceEngine::SetTraceFilter\n"
// Audio processing
// https://code.google.com/p/webrtc/issues/detail?id=2521 for details.
@@ -65,7 +67,6 @@ char kTSanDefaultSuppressions[] =
"deadlock:webrtc::ProcessThreadImpl::RegisterModule\n"
"deadlock:webrtc::RTCPReceiver::SetSsrcs\n"
"deadlock:webrtc::RTPSenderAudio::RegisterAudioPayload\n"
-"deadlock:webrtc/system_wrappers/source/logging_unittest.cc\n"
"deadlock:webrtc::test::UdpSocketManagerPosixImpl::RemoveSocket\n"
"deadlock:webrtc::vcm::VideoReceiver::RegisterPacketRequestCallback\n"
"deadlock:webrtc::VideoSendStreamTest_SuspendBelowMinBitrate_Test::TestBody\n"
diff --git a/build/webrtc.gni b/build/webrtc.gni
index 409ae1d2..fcbf04f7 100644
--- a/build/webrtc.gni
+++ b/build/webrtc.gni
@@ -25,6 +25,9 @@ declare_args() {
# when building voice engine exclusively.
enable_video = true
+ # Disable this to not build libvpx and instead use an externally provided lib.
+ build_libvpx = true
+
# Selects fixed-point code where possible.
prefer_fixed_point = false
@@ -32,10 +35,16 @@ declare_args() {
# which can be easily parsed for offline processing.
enable_data_logging = false
- build_libjpeg = true
# Enables the use of protocol buffers for debug recordings.
enable_protobuf = true
+ # Disable these to not build components which can be externally provided.
+ build_json = true
+ build_libjpeg = true
+ build_libyuv = true
+ build_libvpx = true
+ build_ssl = true
+
# Disable by default.
have_dbus_glib = false
diff --git a/call.h b/call.h
index 480d73ec..86cf1c6a 100644
--- a/call.h
+++ b/call.h
@@ -86,8 +86,6 @@ class Call {
static Call* Create(const Call::Config& config,
const webrtc::Config& webrtc_config);
- virtual VideoSendStream::Config GetDefaultSendConfig() = 0;
-
virtual VideoSendStream* CreateVideoSendStream(
const VideoSendStream::Config& config,
const std::vector<VideoStream>& video_streams,
@@ -95,8 +93,6 @@ class Call {
virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0;
- virtual VideoReceiveStream::Config GetDefaultReceiveConfig() = 0;
-
virtual VideoReceiveStream* CreateVideoReceiveStream(
const VideoReceiveStream::Config& config) = 0;
virtual void DestroyVideoReceiveStream(
diff --git a/common_audio/signal_processing/include/signal_processing_library.h b/common_audio/signal_processing/include/signal_processing_library.h
index 3a5d51cc..a658645d 100644
--- a/common_audio/signal_processing/include/signal_processing_library.h
+++ b/common_audio/signal_processing/include/signal_processing_library.h
@@ -111,8 +111,6 @@
// Shifting with negative numbers allowed
// Positive means left shift
-#define WEBRTC_SPL_SHIFT_W16(x, c) \
- (((c) >= 0) ? ((x) << (c)) : ((x) >> (-(c))))
#define WEBRTC_SPL_SHIFT_W32(x, c) \
(((c) >= 0) ? ((x) << (c)) : ((x) >> (-(c))))
diff --git a/common_audio/signal_processing/signal_processing_unittest.cc b/common_audio/signal_processing/signal_processing_unittest.cc
index 81ca3694..a68840e8 100644
--- a/common_audio/signal_processing/signal_processing_unittest.cc
+++ b/common_audio/signal_processing/signal_processing_unittest.cc
@@ -79,7 +79,6 @@ TEST_F(SplTest, MacroTest) {
// Shifting with negative numbers allowed
int shift_amount = 1; // Workaround compiler warning using variable here.
// Positive means left shift
- EXPECT_EQ(32766, WEBRTC_SPL_SHIFT_W16(a, shift_amount));
EXPECT_EQ(32766, WEBRTC_SPL_SHIFT_W32(a, shift_amount));
// Shifting with negative numbers not allowed
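
Example (not part of the patch): the surviving WEBRTC_SPL_SHIFT_W32 macro, like the removed W16 variant, shifts left for a non-negative shift amount and right for a negative one. A standalone sketch of the same semantics, using a hypothetical helper name:

#include <stdint.h>

// Mirrors WEBRTC_SPL_SHIFT_W32: a positive |c| shifts left, a negative |c|
// shifts right.
static inline int32_t ShiftBySignedAmount(int32_t x, int c) {
  return (c >= 0) ? (x << c) : (x >> (-c));
}
// ShiftBySignedAmount(16383, 1) == 32766, matching the expectation above.
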
diff --git a/common_types.h b/common_types.h
index 6892a83f..193e2818 100644
--- a/common_types.h
+++ b/common_types.h
@@ -587,10 +587,24 @@ struct VideoCodecVP8 {
}
};
+// H264 specific.
+struct VideoCodecH264
+{
+ VideoCodecProfile profile;
+ bool frameDroppingOn;
+ int keyFrameInterval;
+ // These are NULL/0 if not externally negotiated.
+ const uint8_t* spsData;
+ size_t spsLen;
+ const uint8_t* ppsData;
+ size_t ppsLen;
+};
+
// Video codec types
enum VideoCodecType
{
kVideoCodecVP8,
+ kVideoCodecH264,
kVideoCodecI420,
kVideoCodecRED,
kVideoCodecULPFEC,
@@ -601,6 +615,7 @@ enum VideoCodecType
union VideoCodecUnion
{
VideoCodecVP8 VP8;
+ VideoCodecH264 H264;
};
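
Example (not part of the patch): a sketch of how the new H264 fields might be filled in by a caller, assuming the usual VideoCodec wrapper in common_types.h with a codecType member and a codecSpecific union of type VideoCodecUnion; the profile constant and include path are illustrative assumptions.

#include "webrtc/common_types.h"  // Assumed include path.

void ConfigureH264(VideoCodec* codec) {  // Hypothetical helper.
  codec->codecType = kVideoCodecH264;
  codec->codecSpecific.H264.profile = kProfileBase;  // Assumed profile value.
  codec->codecSpecific.H264.frameDroppingOn = true;
  codec->codecSpecific.H264.keyFrameInterval = 3000;
  // Not externally negotiated, so leave the parameter sets empty.
  codec->codecSpecific.H264.spsData = NULL;
  codec->codecSpecific.H264.spsLen = 0;
  codec->codecSpecific.H264.ppsData = NULL;
  codec->codecSpecific.H264.ppsLen = 0;
}
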
diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn
index 2e877e6b..69915e34 100644
--- a/common_video/BUILD.gn
+++ b/common_video/BUILD.gn
@@ -8,6 +8,38 @@
import("../build/webrtc.gni")
+config("common_video_config") {
+ include_dirs = [
+ "interface",
+ "libyuv/include",
+ ]
+}
+
source_set("common_video") {
- # TODO(pbos): Implement.
+ sources = [
+ "i420_video_frame.cc",
+ "interface/i420_video_frame.h",
+ "interface/native_handle.h",
+ "interface/texture_video_frame.h",
+ "libyuv/include/scaler.h",
+ "libyuv/include/webrtc_libyuv.h",
+ "libyuv/scaler.cc",
+ "libyuv/webrtc_libyuv.cc",
+ "plane.cc",
+ "plane.h",
+ "texture_video_frame.cc"
+ ]
+
+ include_dirs = [ "../modules/interface" ]
+
+ direct_dependent_configs = [ ":common_video_config" ]
+
+ deps = [ "../system_wrappers" ]
+
+ if (build_libyuv) {
+ deps += [ "//third_party/libyuv" ]
+ } else {
+ # Need to add a directory normally exported by libyuv.
+ include_dirs += [ "//third_party/libyuv/include" ]
+ }
}
diff --git a/engine_configurations.h b/engine_configurations.h
index be858b8e..e9f23097 100644
--- a/engine_configurations.h
+++ b/engine_configurations.h
@@ -51,6 +51,7 @@
#define VIDEOCODEC_I420
#define VIDEOCODEC_VP8
+#define VIDEOCODEC_H264
// ============================================================================
// VoiceEngine
diff --git a/examples/android/media_demo/jni/video_engine_jni.cc b/examples/android/media_demo/jni/video_engine_jni.cc
index c6589ece..c7af1c54 100644
--- a/examples/android/media_demo/jni/video_engine_jni.cc
+++ b/examples/android/media_demo/jni/video_engine_jni.cc
@@ -710,3 +710,9 @@ JOWW(void, VideoCodecInst_setMaxFrameRate)(JNIEnv* jni, jobject j_codec,
JOWW(void, CameraDesc_dispose)(JNIEnv* jni, jobject j_camera) {
delete GetCameraDesc(jni, j_camera);
}
+
+JOWW(jint, VideoEngine_setLocalSSRC)(JNIEnv* jni, jobject j_vie, jint channel,
+ jint ssrc) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->rtp->SetLocalSSRC(channel, ssrc);
+}
diff --git a/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java b/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
index f6284315..f55bcccf 100644
--- a/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
+++ b/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
@@ -259,7 +259,7 @@ public class MediaEngine implements VideoDecodeEncodeObserver {
public void setTrace(boolean enable) {
if (enable) {
vie.setTraceFile("/sdcard/trace.txt", false);
- vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_ERROR);
+ vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_ALL);
return;
}
vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_NONE);
@@ -561,6 +561,14 @@ public class MediaEngine implements VideoDecodeEncodeObserver {
check(vie.setSendDestination(videoChannel, videoTxPort, remoteIp) == 0,
"Failed setSendDestination");
}
+
+ // Set the local SSRC manually (to an arbitrary value) for the loopback
+ // test; otherwise the SSRCs clash, a new SSRC gets picked, and the
+ // receiver is reset, among other minor issues.
+ if (remoteIp.equals("127.0.0.1")) {
+ check(vie.setLocalSSRC(videoChannel, 0x01234567) == 0,
+ "Failed setLocalSSRC");
+ }
}
public int videoTxPort() {
diff --git a/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java b/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java
index 7cfa5856..677ff42f 100644
--- a/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java
+++ b/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java
@@ -116,4 +116,5 @@ public class VideoEngine {
return stopRtpDump(channel, direction.ordinal());
}
private native int stopRtpDump(int channel, int direction);
+ public native int setLocalSSRC(int channel, int ssrc);
}
diff --git a/experiments.h b/experiments.h
index b03d248c..3b019b48 100644
--- a/experiments.h
+++ b/experiments.h
@@ -14,16 +14,6 @@
#include "webrtc/typedefs.h"
namespace webrtc {
-struct PaddingStrategy {
- PaddingStrategy()
- : redundant_payloads(false) {}
- explicit PaddingStrategy(bool redundant_payloads)
- : redundant_payloads(redundant_payloads) {}
- virtual ~PaddingStrategy() {}
-
- const bool redundant_payloads;
-};
-
struct RemoteBitrateEstimatorMinRate {
RemoteBitrateEstimatorMinRate() : min_rate(30000) {}
RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {}
diff --git a/modules/audio_device/dummy/file_audio_device_factory.cc b/modules/audio_device/dummy/file_audio_device_factory.cc
index db35bf11..5e252305 100644
--- a/modules/audio_device/dummy/file_audio_device_factory.cc
+++ b/modules/audio_device/dummy/file_audio_device_factory.cc
@@ -32,12 +32,19 @@ FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice(
void FileAudioDeviceFactory::SetFilenamesToUse(
const char* inputAudioFilename, const char* outputAudioFilename) {
+#ifdef WEBRTC_DUMMY_FILE_DEVICES
assert(strlen(inputAudioFilename) < MAX_FILENAME_LEN &&
strlen(outputAudioFilename) < MAX_FILENAME_LEN);
// Copy the strings since we don't know the lifetime of the input pointers.
strncpy(_inputAudioFilename, inputAudioFilename, MAX_FILENAME_LEN);
strncpy(_outputAudioFilename, outputAudioFilename, MAX_FILENAME_LEN);
+#else
+ // Sanity: must be compiled with the right define to run this.
+ printf("Trying to use dummy file devices, but is not compiled "
+ "with WEBRTC_DUMMY_FILE_DEVICES. Bailing out.\n");
+ exit(1);
+#endif
}
} // namespace webrtc
diff --git a/modules/audio_processing/aec/aec_rdft_neon.c b/modules/audio_processing/aec/aec_rdft_neon.c
index 80892d26..a9c79b7b 100644
--- a/modules/audio_processing/aec/aec_rdft_neon.c
+++ b/modules/audio_processing/aec/aec_rdft_neon.c
@@ -178,8 +178,96 @@ static void cftmdl_128_neon(float* a) {
}
}
+__inline static float32x4_t reverse_order_f32x4(float32x4_t in) {
+ // A B C D -> C D A B
+ const float32x4_t rev = vcombine_f32(vget_high_f32(in), vget_low_f32(in));
+ // C D A B -> D C B A
+ return vrev64q_f32(rev);
+}
+
+static void rftfsub_128_neon(float* a) {
+ const float* c = rdft_w + 32;
+ int j1, j2, k1, k2;
+ float wkr, wki, xr, xi, yr, yi;
+ const float32x4_t mm_half = vdupq_n_f32(0.5f);
+
+ // Vectorized code (four at once).
+ // Note: commented number are indexes for the first iteration of the loop.
+ for (j1 = 1, j2 = 2; j2 + 7 < 64; j1 += 4, j2 += 8) {
+ // Load 'wk'.
+ const float32x4_t c_j1 = vld1q_f32(&c[j1]); // 1, 2, 3, 4,
+ const float32x4_t c_k1 = vld1q_f32(&c[29 - j1]); // 28, 29, 30, 31,
+ const float32x4_t wkrt = vsubq_f32(mm_half, c_k1); // 28, 29, 30, 31,
+ const float32x4_t wkr_ = reverse_order_f32x4(wkrt);
+ const float32x4_t wki_ = c_j1; // 1, 2, 3, 4,
+ // Load and shuffle 'a'.
+ // 2, 4, 6, 8, 3, 5, 7, 9
+ float32x4x2_t a_j2_p = vld2q_f32(&a[0 + j2]);
+ // 120, 122, 124, 126, 121, 123, 125, 127,
+ const float32x4x2_t k2_0_4 = vld2q_f32(&a[122 - j2]);
+ // 126, 124, 122, 120
+ const float32x4_t a_k2_p0 = reverse_order_f32x4(k2_0_4.val[0]);
+ // 127, 125, 123, 121
+ const float32x4_t a_k2_p1 = reverse_order_f32x4(k2_0_4.val[1]);
+ // Calculate 'x'.
+ const float32x4_t xr_ = vsubq_f32(a_j2_p.val[0], a_k2_p0);
+ // 2-126, 4-124, 6-122, 8-120,
+ const float32x4_t xi_ = vaddq_f32(a_j2_p.val[1], a_k2_p1);
+ // 3-127, 5-125, 7-123, 9-121,
+ // Calculate product into 'y'.
+ // yr = wkr * xr - wki * xi;
+ // yi = wkr * xi + wki * xr;
+ const float32x4_t a_ = vmulq_f32(wkr_, xr_);
+ const float32x4_t b_ = vmulq_f32(wki_, xi_);
+ const float32x4_t c_ = vmulq_f32(wkr_, xi_);
+ const float32x4_t d_ = vmulq_f32(wki_, xr_);
+ const float32x4_t yr_ = vsubq_f32(a_, b_); // 2-126, 4-124, 6-122, 8-120,
+ const float32x4_t yi_ = vaddq_f32(c_, d_); // 3-127, 5-125, 7-123, 9-121,
+ // Update 'a'.
+ // a[j2 + 0] -= yr;
+ // a[j2 + 1] -= yi;
+ // a[k2 + 0] += yr;
+ // a[k2 + 1] -= yi;
+ // 126, 124, 122, 120,
+ const float32x4_t a_k2_p0n = vaddq_f32(a_k2_p0, yr_);
+ // 127, 125, 123, 121,
+ const float32x4_t a_k2_p1n = vsubq_f32(a_k2_p1, yi_);
+ // Shuffle in right order and store.
+ const float32x4_t a_k2_p0nr = vrev64q_f32(a_k2_p0n);
+ const float32x4_t a_k2_p1nr = vrev64q_f32(a_k2_p1n);
+ // 124, 125, 126, 127, 120, 121, 122, 123
+ const float32x4x2_t a_k2_n = vzipq_f32(a_k2_p0nr, a_k2_p1nr);
+ // 2, 4, 6, 8,
+ a_j2_p.val[0] = vsubq_f32(a_j2_p.val[0], yr_);
+ // 3, 5, 7, 9,
+ a_j2_p.val[1] = vsubq_f32(a_j2_p.val[1], yi_);
+ // 2, 3, 4, 5, 6, 7, 8, 9,
+ vst2q_f32(&a[0 + j2], a_j2_p);
+
+ vst1q_f32(&a[122 - j2], a_k2_n.val[1]);
+ vst1q_f32(&a[126 - j2], a_k2_n.val[0]);
+ }
+
+ // Scalar code for the remaining items.
+ for (; j2 < 64; j1 += 1, j2 += 2) {
+ k2 = 128 - j2;
+ k1 = 32 - j1;
+ wkr = 0.5f - c[k1];
+ wki = c[j1];
+ xr = a[j2 + 0] - a[k2 + 0];
+ xi = a[j2 + 1] + a[k2 + 1];
+ yr = wkr * xr - wki * xi;
+ yi = wkr * xi + wki * xr;
+ a[j2 + 0] -= yr;
+ a[j2 + 1] -= yi;
+ a[k2 + 0] += yr;
+ a[k2 + 1] -= yi;
+ }
+}
+
void aec_rdft_init_neon(void) {
cft1st_128 = cft1st_128_neon;
cftmdl_128 = cftmdl_128_neon;
+ rftfsub_128 = rftfsub_128_neon;
}
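
Example (not part of the patch): reverse_order_f32x4() above reverses a four-lane vector in two steps, first swapping the 64-bit halves and then the pair within each half (A B C D -> C D A B -> D C B A). A scalar equivalent, with a hypothetical name, for reference:

// Scalar equivalent of reverse_order_f32x4: {A, B, C, D} -> {D, C, B, A}.
static void ReverseOrder4(const float in[4], float out[4]) {
  out[0] = in[3];
  out[1] = in[2];
  out[2] = in[1];
  out[3] = in[0];
}
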
diff --git a/modules/audio_processing/aec/aec_resampler.c b/modules/audio_processing/aec/aec_resampler.c
index 5382665e..469b8111 100644
--- a/modules/audio_processing/aec/aec_resampler.c
+++ b/modules/audio_processing/aec/aec_resampler.c
@@ -26,7 +26,7 @@ enum {
};
typedef struct {
- short buffer[kResamplerBufferSize];
+ float buffer[kResamplerBufferSize];
float position;
int deviceSampleRateHz;
@@ -71,15 +71,15 @@ int WebRtcAec_FreeResampler(void* resampInst) {
}
void WebRtcAec_ResampleLinear(void* resampInst,
- const short* inspeech,
+ const float* inspeech,
int size,
float skew,
- short* outspeech,
+ float* outspeech,
int* size_out) {
resampler_t* obj = (resampler_t*)resampInst;
- short* y;
- float be, tnew, interp;
+ float* y;
+ float be, tnew;
int tn, mm;
assert(!(size < 0 || size > 2 * FRAME_LEN));
@@ -91,7 +91,7 @@ void WebRtcAec_ResampleLinear(void* resampInst,
// Add new frame data in lookahead
memcpy(&obj->buffer[FRAME_LEN + kResamplingDelay],
inspeech,
- size * sizeof(short));
+ size * sizeof(inspeech[0]));
// Sample rate ratio
be = 1 + skew;
@@ -106,15 +106,7 @@ void WebRtcAec_ResampleLinear(void* resampInst,
while (tn < size) {
// Interpolation
- interp = y[tn] + (tnew - tn) * (y[tn + 1] - y[tn]);
-
- if (interp > 32767) {
- interp = 32767;
- } else if (interp < -32768) {
- interp = -32768;
- }
-
- outspeech[mm] = (short)interp;
+ outspeech[mm] = y[tn] + (tnew - tn) * (y[tn + 1] - y[tn]);
mm++;
tnew = be * mm + obj->position;
@@ -127,7 +119,7 @@ void WebRtcAec_ResampleLinear(void* resampInst,
// Shift buffer
memmove(obj->buffer,
&obj->buffer[size],
- (kResamplerBufferSize - size) * sizeof(short));
+ (kResamplerBufferSize - size) * sizeof(obj->buffer[0]));
}
int WebRtcAec_GetSkew(void* resampInst, int rawSkew, float* skewEst) {
diff --git a/modules/audio_processing/aec/aec_resampler.h b/modules/audio_processing/aec/aec_resampler.h
index e42c056f..73e28212 100644
--- a/modules/audio_processing/aec/aec_resampler.h
+++ b/modules/audio_processing/aec/aec_resampler.h
@@ -30,10 +30,10 @@ int WebRtcAec_GetSkew(void* resampInst, int rawSkew, float* skewEst);
// Resamples input using linear interpolation.
void WebRtcAec_ResampleLinear(void* resampInst,
- const short* inspeech,
+ const float* inspeech,
int size,
float skew,
- short* outspeech,
+ float* outspeech,
int* size_out);
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_RESAMPLER_H_
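
Example (not part of the patch): with the float interface the int16 saturation step is gone and the core of the resampler is plain linear interpolation between neighbouring samples. A simplified, self-contained sketch with a hypothetical name; it omits the skew estimation and ring buffering of the real resampler, and |out| must hold roughly in_size / ratio + 1 samples.

// Output sample m is taken at position m * ratio in |in| and linearly
// interpolated between the two surrounding input samples, as in
// WebRtcAec_ResampleLinear above. The last input sample is dropped to
// avoid reading past the end of |in|.
static void LinearResampleSketch(const float* in, int in_size, float ratio,
                                 float* out, int* out_size) {
  int m = 0;
  float pos = 0.f;
  int n = 0;
  while (n + 1 < in_size) {
    out[m] = in[n] + (pos - (float)n) * (in[n + 1] - in[n]);
    ++m;
    pos = ratio * (float)m;
    n = (int)pos;
  }
  *out_size = m;
}
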
diff --git a/modules/audio_processing/aec/echo_cancellation.c b/modules/audio_processing/aec/echo_cancellation.c
index ba3b9243..b58edcbb 100644
--- a/modules/audio_processing/aec/echo_cancellation.c
+++ b/modules/audio_processing/aec/echo_cancellation.c
@@ -294,17 +294,12 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
// only buffer L band for farend
int32_t WebRtcAec_BufferFarend(void* aecInst,
- const int16_t* farend,
+ const float* farend,
int16_t nrOfSamples) {
aecpc_t* aecpc = aecInst;
- int32_t retVal = 0;
int newNrOfSamples = (int)nrOfSamples;
- short newFarend[MAX_RESAMP_LEN];
- const int16_t* farend_ptr = farend;
- float tmp_farend[MAX_RESAMP_LEN];
- const float* farend_float = tmp_farend;
- float skew;
- int i = 0;
+ float new_farend[MAX_RESAMP_LEN];
+ const float* farend_ptr = farend;
if (farend == NULL) {
aecpc->lastError = AEC_NULL_POINTER_ERROR;
@@ -322,17 +317,15 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
return -1;
}
- skew = aecpc->skew;
-
if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) {
// Resample and get a new number of samples
WebRtcAec_ResampleLinear(aecpc->resampler,
farend,
nrOfSamples,
- skew,
- newFarend,
+ aecpc->skew,
+ new_farend,
&newNrOfSamples);
- farend_ptr = (const int16_t*)newFarend;
+ farend_ptr = new_farend;
}
aecpc->farend_started = 1;
@@ -343,32 +336,31 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
WebRtc_WriteBuffer(
aecpc->far_pre_buf_s16, farend_ptr, (size_t)newNrOfSamples);
#endif
- // Cast to float and write the time-domain data to |far_pre_buf|.
- for (i = 0; i < newNrOfSamples; i++) {
- tmp_farend[i] = (float)farend_ptr[i];
- }
- WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_float, (size_t)newNrOfSamples);
+ // Write the time-domain data to |far_pre_buf|.
+ WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_ptr, (size_t)newNrOfSamples);
// Transform to frequency domain if we have enough data.
while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) {
// We have enough data to pass to the FFT, hence read PART_LEN2 samples.
- WebRtc_ReadBuffer(
- aecpc->far_pre_buf, (void**)&farend_float, tmp_farend, PART_LEN2);
-
- WebRtcAec_BufferFarendPartition(aecpc->aec, farend_float);
+ {
+ float* ptmp;
+ float tmp[PART_LEN2];
+ WebRtc_ReadBuffer(aecpc->far_pre_buf, (void**)&ptmp, tmp, PART_LEN2);
+ WebRtcAec_BufferFarendPartition(aecpc->aec, ptmp);
+ }
// Rewind |far_pre_buf| PART_LEN samples for overlap before continuing.
WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN);
#ifdef WEBRTC_AEC_DEBUG_DUMP
WebRtc_ReadBuffer(
- aecpc->far_pre_buf_s16, (void**)&farend_ptr, newFarend, PART_LEN2);
+ aecpc->far_pre_buf_s16, (void**)&farend_ptr, new_farend, PART_LEN2);
WebRtc_WriteBuffer(
WebRtcAec_far_time_buf(aecpc->aec), &farend_ptr[PART_LEN], 1);
WebRtc_MoveReadPtr(aecpc->far_pre_buf_s16, -PART_LEN);
#endif
}
- return retVal;
+ return 0;
}
int32_t WebRtcAec_Process(void* aecInst,
diff --git a/modules/audio_processing/aec/include/echo_cancellation.h b/modules/audio_processing/aec/include/echo_cancellation.h
index dc64a345..0cf6a5a4 100644
--- a/modules/audio_processing/aec/include/echo_cancellation.h
+++ b/modules/audio_processing/aec/include/echo_cancellation.h
@@ -114,7 +114,7 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq);
* Inputs Description
* -------------------------------------------------------------------
* void* aecInst Pointer to the AEC instance
- * int16_t* farend In buffer containing one frame of
+ * const float* farend In buffer containing one frame of
* farend signal for L band
* int16_t nrOfSamples Number of samples in farend buffer
*
@@ -124,7 +124,7 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq);
* -1: error
*/
int32_t WebRtcAec_BufferFarend(void* aecInst,
- const int16_t* farend,
+ const float* farend,
int16_t nrOfSamples);
/*
diff --git a/modules/audio_processing/aec/system_delay_unittest.cc b/modules/audio_processing/aec/system_delay_unittest.cc
index 5fbc5600..f81ce476 100644
--- a/modules/audio_processing/aec/system_delay_unittest.cc
+++ b/modules/audio_processing/aec/system_delay_unittest.cc
@@ -47,7 +47,7 @@ class SystemDelayTest : public ::testing::Test {
int samples_per_frame_;
// Dummy input/output speech data.
static const int kSamplesPerChunk = 160;
- int16_t far_[kSamplesPerChunk];
+ float far_[kSamplesPerChunk];
float near_[kSamplesPerChunk];
float out_[kSamplesPerChunk];
};
@@ -55,9 +55,10 @@ class SystemDelayTest : public ::testing::Test {
SystemDelayTest::SystemDelayTest()
: handle_(NULL), self_(NULL), samples_per_frame_(0) {
// Dummy input data are set with more or less arbitrary non-zero values.
- memset(far_, 1, sizeof(far_));
- for (int i = 0; i < kSamplesPerChunk; i++)
+ for (int i = 0; i < kSamplesPerChunk; i++) {
+ far_[i] = 257.0;
near_[i] = 514.0;
+ }
memset(out_, 0, sizeof(out_));
}
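
Note (not part of the patch): the old memset(far_, 1, sizeof(far_)) wrote the byte 0x01 into both bytes of every int16_t element, i.e. the value 0x0101 == 257, so filling the new float array with 257.0 keeps the test input numerically identical. A small self-contained check:

#include <assert.h>
#include <stdint.h>
#include <string.h>

static void CheckMemsetEquivalence(void) {
  int16_t s16;
  memset(&s16, 1, sizeof(s16));  // Both bytes become 0x01: 0x0101 == 257.
  assert(s16 == 257);            // Hence 257.0f reproduces the old input.
}
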
diff --git a/modules/audio_processing/aecm/aecm_core_c.c b/modules/audio_processing/aecm/aecm_core_c.c
index f8491e97..00323ccd 100644
--- a/modules/audio_processing/aecm/aecm_core_c.c
+++ b/modules/audio_processing/aecm/aecm_core_c.c
@@ -328,7 +328,7 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
int16_t zeros32, zeros16;
int16_t zerosDBufNoisy, zerosDBufClean, zerosXBuf;
int far_q;
- int16_t resolutionDiff, qDomainDiff;
+ int16_t resolutionDiff, qDomainDiff, dfa_clean_q_domain_diff;
const int kMinPrefBand = 4;
const int kMaxPrefBand = 24;
@@ -499,29 +499,28 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
}
zeros16 = WebRtcSpl_NormW16(aecm->nearFilt[i]);
- if ((zeros16 < (aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld))
- & (aecm->nearFilt[i]))
- {
- tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], zeros16);
- qDomainDiff = zeros16 - aecm->dfaCleanQDomain + aecm->dfaCleanQDomainOld;
- } else
- {
- tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i],
- aecm->dfaCleanQDomain -
- aecm->dfaCleanQDomainOld);
+ assert(zeros16 >= 0); // |zeros16| is a norm, hence non-negative.
+ dfa_clean_q_domain_diff = aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld;
+ if (zeros16 < dfa_clean_q_domain_diff && aecm->nearFilt[i]) {
+ tmp16no1 = aecm->nearFilt[i] << zeros16;
+ qDomainDiff = zeros16 - dfa_clean_q_domain_diff;
+ tmp16no2 = ptrDfaClean[i] >> -qDomainDiff;
+ } else {
+ tmp16no1 = dfa_clean_q_domain_diff < 0
+ ? aecm->nearFilt[i] >> -dfa_clean_q_domain_diff
+ : aecm->nearFilt[i] << dfa_clean_q_domain_diff;
qDomainDiff = 0;
+ tmp16no2 = ptrDfaClean[i];
}
- tmp16no2 = WEBRTC_SPL_SHIFT_W16(ptrDfaClean[i], qDomainDiff);
tmp32no1 = (int32_t)(tmp16no2 - tmp16no1);
tmp16no2 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 4);
tmp16no2 += tmp16no1;
zeros16 = WebRtcSpl_NormW16(tmp16no2);
- if ((tmp16no2) & (-qDomainDiff > zeros16))
- {
+ if ((tmp16no2) & (-qDomainDiff > zeros16)) {
aecm->nearFilt[i] = WEBRTC_SPL_WORD16_MAX;
- } else
- {
- aecm->nearFilt[i] = WEBRTC_SPL_SHIFT_W16(tmp16no2, -qDomainDiff);
+ } else {
+ aecm->nearFilt[i] = qDomainDiff < 0 ? tmp16no2 << -qDomainDiff
+ : tmp16no2 >> qDomainDiff;
}
// Wiener filter coefficients, resulting hnl in Q14
diff --git a/modules/audio_processing/aecm/aecm_core_mips.c b/modules/audio_processing/aecm/aecm_core_mips.c
index 6a231b38..31f232b8 100644
--- a/modules/audio_processing/aecm/aecm_core_mips.c
+++ b/modules/audio_processing/aecm/aecm_core_mips.c
@@ -836,7 +836,7 @@ int WebRtcAecm_ProcessBlock(AecmCore_t* aecm,
int16_t zeros32, zeros16;
int16_t zerosDBufNoisy, zerosDBufClean, zerosXBuf;
int far_q;
- int16_t resolutionDiff, qDomainDiff;
+ int16_t resolutionDiff, qDomainDiff, dfa_clean_q_domain_diff;
const int kMinPrefBand = 4;
const int kMaxPrefBand = 24;
@@ -1002,15 +1002,16 @@ int WebRtcAecm_ProcessBlock(AecmCore_t* aecm,
}
zeros16 = WebRtcSpl_NormW16(aecm->nearFilt[i]);
- if ((zeros16 < (aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld))
- & (aecm->nearFilt[i])) {
- tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i], zeros16);
- qDomainDiff = zeros16 - aecm->dfaCleanQDomain + aecm->dfaCleanQDomainOld;
- tmp16no2 = WEBRTC_SPL_SHIFT_W16(ptrDfaClean[i], qDomainDiff);
+ assert(zeros16 >= 0); // |zeros16| is a norm, hence non-negative.
+ dfa_clean_q_domain_diff = aecm->dfaCleanQDomain - aecm->dfaCleanQDomainOld;
+ if (zeros16 < dfa_clean_q_domain_diff && aecm->nearFilt[i]) {
+ tmp16no1 = aecm->nearFilt[i] << zeros16;
+ qDomainDiff = zeros16 - dfa_clean_q_domain_diff;
+ tmp16no2 = ptrDfaClean[i] >> -qDomainDiff;
} else {
- tmp16no1 = WEBRTC_SPL_SHIFT_W16(aecm->nearFilt[i],
- aecm->dfaCleanQDomain
- - aecm->dfaCleanQDomainOld);
+ tmp16no1 = dfa_clean_q_domain_diff < 0
+ ? aecm->nearFilt[i] >> -dfa_clean_q_domain_diff
+ : aecm->nearFilt[i] << dfa_clean_q_domain_diff;
qDomainDiff = 0;
tmp16no2 = ptrDfaClean[i];
}
@@ -1022,7 +1023,8 @@ int WebRtcAecm_ProcessBlock(AecmCore_t* aecm,
if ((tmp16no2) & (-qDomainDiff > zeros16)) {
aecm->nearFilt[i] = WEBRTC_SPL_WORD16_MAX;
} else {
- aecm->nearFilt[i] = WEBRTC_SPL_SHIFT_W16(tmp16no2, -qDomainDiff);
+ aecm->nearFilt[i] = qDomainDiff < 0 ? tmp16no2 << -qDomainDiff
+ : tmp16no2 >> qDomainDiff;
}
// Wiener filter coefficients, resulting hnl in Q14
diff --git a/modules/audio_processing/audio_buffer.cc b/modules/audio_processing/audio_buffer.cc
index b0f1eb6c..35e1eb7c 100644
--- a/modules/audio_processing/audio_buffer.cc
+++ b/modules/audio_processing/audio_buffer.cc
@@ -294,11 +294,16 @@ int16_t* AudioBuffer::data(int channel) {
return const_cast<int16_t*>(t->data(channel));
}
-float* AudioBuffer::data_f(int channel) {
+const float* AudioBuffer::data_f(int channel) const {
assert(channel >= 0 && channel < num_proc_channels_);
return channels_->fbuf()->channel(channel);
}
+float* AudioBuffer::data_f(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<float*>(t->data_f(channel));
+}
+
const int16_t* AudioBuffer::low_pass_split_data(int channel) const {
assert(channel >= 0 && channel < num_proc_channels_);
return split_channels_.get() ? split_channels_->low_channel(channel)
@@ -310,12 +315,17 @@ int16_t* AudioBuffer::low_pass_split_data(int channel) {
return const_cast<int16_t*>(t->low_pass_split_data(channel));
}
-float* AudioBuffer::low_pass_split_data_f(int channel) {
+const float* AudioBuffer::low_pass_split_data_f(int channel) const {
assert(channel >= 0 && channel < num_proc_channels_);
return split_channels_.get() ? split_channels_->low_channel_f(channel)
: data_f(channel);
}
+float* AudioBuffer::low_pass_split_data_f(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<float*>(t->low_pass_split_data_f(channel));
+}
+
const int16_t* AudioBuffer::high_pass_split_data(int channel) const {
assert(channel >= 0 && channel < num_proc_channels_);
return split_channels_.get() ? split_channels_->high_channel(channel) : NULL;
@@ -326,12 +336,17 @@ int16_t* AudioBuffer::high_pass_split_data(int channel) {
return const_cast<int16_t*>(t->high_pass_split_data(channel));
}
-float* AudioBuffer::high_pass_split_data_f(int channel) {
+const float* AudioBuffer::high_pass_split_data_f(int channel) const {
assert(channel >= 0 && channel < num_proc_channels_);
return split_channels_.get() ? split_channels_->high_channel_f(channel)
: NULL;
}
+float* AudioBuffer::high_pass_split_data_f(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<float*>(t->high_pass_split_data_f(channel));
+}
+
const int16_t* AudioBuffer::mixed_data(int channel) const {
assert(channel >= 0 && channel < num_mixed_channels_);
diff --git a/modules/audio_processing/audio_buffer.h b/modules/audio_processing/audio_buffer.h
index 67e4f485..2fab814a 100644
--- a/modules/audio_processing/audio_buffer.h
+++ b/modules/audio_processing/audio_buffer.h
@@ -69,8 +69,11 @@ class AudioBuffer {
// Float versions of the accessors, with automatic conversion back and forth
// as necessary. The range of the numbers are the same as for int16_t.
float* data_f(int channel);
+ const float* data_f(int channel) const;
float* low_pass_split_data_f(int channel);
+ const float* low_pass_split_data_f(int channel) const;
float* high_pass_split_data_f(int channel);
+ const float* high_pass_split_data_f(int channel) const;
const float* keyboard_data() const;
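
Example (not part of the patch): the new const overloads are paired with the non-const ones via the usual "implement the non-const accessor in terms of the const one" idiom, as AudioBuffer::data_f() does in the .cc hunk above. A generic sketch with a hypothetical class:

// Hypothetical class illustrating the const/non-const accessor pairing.
class FloatChannels {
 public:
  const float* channel(int i) const { return data_[i]; }
  float* channel(int i) {
    // Reuse the const overload, then cast the constness back off.
    const FloatChannels* t = this;
    return const_cast<float*>(t->channel(i));
  }
 private:
  float* data_[2];
};
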
diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc
index de387edb..cc2b5c00 100644
--- a/modules/audio_processing/audio_processing_impl.cc
+++ b/modules/audio_processing/audio_processing_impl.cc
@@ -303,10 +303,6 @@ void AudioProcessingImpl::SetExtraOptions(const Config& config) {
(*it)->SetExtraOptions(config);
}
-int AudioProcessingImpl::EnableExperimentalNs(bool enable) {
- return kNoError;
-}
-
int AudioProcessingImpl::input_sample_rate_hz() const {
CriticalSectionScoped crit_scoped(crit_);
return fwd_in_format_.rate();
diff --git a/modules/audio_processing/audio_processing_impl.h b/modules/audio_processing/audio_processing_impl.h
index d34f305a..9753423d 100644
--- a/modules/audio_processing/audio_processing_impl.h
+++ b/modules/audio_processing/audio_processing_impl.h
@@ -92,10 +92,6 @@ class AudioProcessingImpl : public AudioProcessing {
ChannelLayout output_layout,
ChannelLayout reverse_layout) OVERRIDE;
virtual void SetExtraOptions(const Config& config) OVERRIDE;
- virtual int EnableExperimentalNs(bool enable) OVERRIDE;
- virtual bool experimental_ns_enabled() const OVERRIDE {
- return false;
- }
virtual int set_sample_rate_hz(int rate) OVERRIDE;
virtual int input_sample_rate_hz() const OVERRIDE;
virtual int sample_rate_hz() const OVERRIDE;
diff --git a/modules/audio_processing/echo_cancellation_impl.cc b/modules/audio_processing/echo_cancellation_impl.cc
index e770f9fe..47b4f181 100644
--- a/modules/audio_processing/echo_cancellation_impl.cc
+++ b/modules/audio_processing/echo_cancellation_impl.cc
@@ -89,7 +89,7 @@ int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
Handle* my_handle = static_cast<Handle*>(handle(handle_index));
err = WebRtcAec_BufferFarend(
my_handle,
- audio->low_pass_split_data(j),
+ audio->low_pass_split_data_f(j),
static_cast<int16_t>(audio->samples_per_split_channel()));
if (err != apm_->kNoError) {
diff --git a/modules/audio_processing/include/audio_processing.h b/modules/audio_processing/include/audio_processing.h
index 1e494d18..6f4cc9e7 100644
--- a/modules/audio_processing/include/audio_processing.h
+++ b/modules/audio_processing/include/audio_processing.h
@@ -209,8 +209,8 @@ class AudioProcessing {
// ensures the options are applied immediately.
virtual void SetExtraOptions(const Config& config) = 0;
- virtual int EnableExperimentalNs(bool enable) = 0;
- virtual bool experimental_ns_enabled() const = 0;
+ virtual int EnableExperimentalNs(bool enable) { return kNoError; }
+ virtual bool experimental_ns_enabled() const { return false; }
// DEPRECATED.
// TODO(ajm): Remove after Chromium has upgraded to using Initialize().
diff --git a/modules/audio_processing/include/mock_audio_processing.h b/modules/audio_processing/include/mock_audio_processing.h
index c1ac23ad..8258bb6c 100644
--- a/modules/audio_processing/include/mock_audio_processing.h
+++ b/modules/audio_processing/include/mock_audio_processing.h
@@ -186,10 +186,6 @@ class MockAudioProcessing : public AudioProcessing {
ChannelLayout reverse_layout));
MOCK_METHOD1(SetExtraOptions,
void(const Config& config));
- MOCK_METHOD1(EnableExperimentalNs,
- int(bool enable));
- MOCK_CONST_METHOD0(experimental_ns_enabled,
- bool());
MOCK_METHOD1(set_sample_rate_hz,
int(int rate));
MOCK_CONST_METHOD0(input_sample_rate_hz,
diff --git a/modules/audio_processing/utility/delay_estimator.c b/modules/audio_processing/utility/delay_estimator.c
index 3b204326..60c7a042 100644
--- a/modules/audio_processing/utility/delay_estimator.c
+++ b/modules/audio_processing/utility/delay_estimator.c
@@ -98,6 +98,7 @@ static void UpdateRobustValidationStatistics(BinaryDelayEstimator* self,
kMaxHitsWhenPossiblyNonCausal : kMaxHitsWhenPossiblyCausal;
int i = 0;
+ assert(self->history_size == self->farend->history_size);
// Reset |candidate_hits| if we have a new candidate.
if (candidate_delay != self->last_candidate_delay) {
self->candidate_hits = 0;
@@ -130,7 +131,7 @@ static void UpdateRobustValidationStatistics(BinaryDelayEstimator* self,
// 4. All other bins are decreased with |valley_depth|.
// TODO(bjornv): Investigate how to make this loop more efficient. Split up
// the loop? Remove parts that doesn't add too much.
- for (i = 0; i < self->farend->history_size; ++i) {
+ for (i = 0; i < self->history_size; ++i) {
int is_in_last_set = (i >= self->last_delay - 2) &&
(i <= self->last_delay + 1) && (i != candidate_delay);
int is_in_candidate_set = (i >= candidate_delay - 2) &&
@@ -277,25 +278,45 @@ BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend(
// Sanity conditions fulfilled.
self = malloc(sizeof(BinaryDelayEstimatorFarend));
}
- if (self != NULL) {
- int malloc_fail = 0;
-
- self->history_size = history_size;
-
- // Allocate memory for history buffers.
- self->binary_far_history = malloc(history_size * sizeof(uint32_t));
- malloc_fail |= (self->binary_far_history == NULL);
+ if (self == NULL) {
+ return NULL;
+ }
- self->far_bit_counts = malloc(history_size * sizeof(int));
- malloc_fail |= (self->far_bit_counts == NULL);
+ self->history_size = 0;
+ self->binary_far_history = NULL;
+ self->far_bit_counts = NULL;
+ if (WebRtc_AllocateFarendBufferMemory(self, history_size) == 0) {
+ WebRtc_FreeBinaryDelayEstimatorFarend(self);
+ self = NULL;
+ }
+ return self;
+}
- if (malloc_fail) {
- WebRtc_FreeBinaryDelayEstimatorFarend(self);
- self = NULL;
- }
+int WebRtc_AllocateFarendBufferMemory(BinaryDelayEstimatorFarend* self,
+ int history_size) {
+ assert(self != NULL);
+ // (Re-)Allocate memory for history buffers.
+ self->binary_far_history =
+ realloc(self->binary_far_history,
+ history_size * sizeof(*self->binary_far_history));
+ self->far_bit_counts = realloc(self->far_bit_counts,
+ history_size * sizeof(*self->far_bit_counts));
+ if ((self->binary_far_history == NULL) || (self->far_bit_counts == NULL)) {
+ history_size = 0;
+ }
+ // Fill with zeros if we have expanded the buffers.
+ if (history_size > self->history_size) {
+ int size_diff = history_size - self->history_size;
+ memset(&self->binary_far_history[self->history_size],
+ 0,
+ sizeof(*self->binary_far_history) * size_diff);
+ memset(&self->far_bit_counts[self->history_size],
+ 0,
+ sizeof(*self->far_bit_counts) * size_diff);
}
+ self->history_size = history_size;
- return self;
+ return self->history_size;
}
void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self) {
@@ -385,51 +406,84 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
// Sanity conditions fulfilled.
self = malloc(sizeof(BinaryDelayEstimator));
}
+ if (self == NULL) {
+ return NULL;
+ }
- if (self != NULL) {
- int malloc_fail = 0;
-
- self->farend = farend;
- self->near_history_size = max_lookahead + 1;
- self->robust_validation_enabled = 0; // Disabled by default.
- self->allowed_offset = 0;
-
- self->lookahead = max_lookahead;
-
- // Allocate memory for spectrum buffers. The extra array element in
- // |mean_bit_counts| and |histogram| is a dummy element only used while
- // |last_delay| == -2, i.e., before we have a valid estimate.
- self->mean_bit_counts =
- malloc((farend->history_size + 1) * sizeof(int32_t));
- malloc_fail |= (self->mean_bit_counts == NULL);
+ self->farend = farend;
+ self->near_history_size = max_lookahead + 1;
+ self->history_size = 0;
+ self->robust_validation_enabled = 0; // Disabled by default.
+ self->allowed_offset = 0;
- self->bit_counts = malloc(farend->history_size * sizeof(int32_t));
- malloc_fail |= (self->bit_counts == NULL);
+ self->lookahead = max_lookahead;
- // Allocate memory for history buffers.
- self->binary_near_history = malloc((max_lookahead + 1) * sizeof(uint32_t));
- malloc_fail |= (self->binary_near_history == NULL);
+ // Allocate memory for spectrum and history buffers.
+ self->mean_bit_counts = NULL;
+ self->bit_counts = NULL;
+ self->histogram = NULL;
+ self->binary_near_history =
+ malloc((max_lookahead + 1) * sizeof(*self->binary_near_history));
+ if (self->binary_near_history == NULL ||
+ WebRtc_AllocateHistoryBufferMemory(self, farend->history_size) == 0) {
+ WebRtc_FreeBinaryDelayEstimator(self);
+ self = NULL;
+ }
- self->histogram = malloc((farend->history_size + 1) * sizeof(float));
- malloc_fail |= (self->histogram == NULL);
+ return self;
+}
- if (malloc_fail) {
- WebRtc_FreeBinaryDelayEstimator(self);
- self = NULL;
- }
+int WebRtc_AllocateHistoryBufferMemory(BinaryDelayEstimator* self,
+ int history_size) {
+ BinaryDelayEstimatorFarend* far = self->farend;
+ // (Re-)Allocate memory for spectrum and history buffers.
+ if (history_size != far->history_size) {
+ // Only update far-end buffers if we need.
+ history_size = WebRtc_AllocateFarendBufferMemory(far, history_size);
+ }
+ // The extra array element in |mean_bit_counts| and |histogram| is a dummy
+ // element only used while |last_delay| == -2, i.e., before we have a valid
+ // estimate.
+ self->mean_bit_counts =
+ realloc(self->mean_bit_counts,
+ (history_size + 1) * sizeof(*self->mean_bit_counts));
+ self->bit_counts =
+ realloc(self->bit_counts, history_size * sizeof(*self->bit_counts));
+ self->histogram =
+ realloc(self->histogram, (history_size + 1) * sizeof(*self->histogram));
+
+ if ((self->mean_bit_counts == NULL) ||
+ (self->bit_counts == NULL) ||
+ (self->histogram == NULL)) {
+ history_size = 0;
}
+ // Fill with zeros if we have expanded the buffers.
+ if (history_size > self->history_size) {
+ int size_diff = history_size - self->history_size;
+ memset(&self->mean_bit_counts[self->history_size],
+ 0,
+ sizeof(*self->mean_bit_counts) * size_diff);
+ memset(&self->bit_counts[self->history_size],
+ 0,
+ sizeof(*self->bit_counts) * size_diff);
+ memset(&self->histogram[self->history_size],
+ 0,
+ sizeof(*self->histogram) * size_diff);
+ }
+ self->history_size = history_size;
- return self;
+ return self->history_size;
}
void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self) {
int i = 0;
assert(self != NULL);
- memset(self->bit_counts, 0, sizeof(int32_t) * self->farend->history_size);
- memset(self->binary_near_history, 0,
+ memset(self->bit_counts, 0, sizeof(int32_t) * self->history_size);
+ memset(self->binary_near_history,
+ 0,
sizeof(uint32_t) * self->near_history_size);
- for (i = 0; i <= self->farend->history_size; ++i) {
+ for (i = 0; i <= self->history_size; ++i) {
self->mean_bit_counts[i] = (20 << 9); // 20 in Q9.
self->histogram[i] = 0.f;
}
@@ -440,7 +494,7 @@ void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self) {
self->last_delay = -2;
self->last_candidate_delay = -2;
- self->compare_delay = self->farend->history_size;
+ self->compare_delay = self->history_size;
self->candidate_hits = 0;
self->last_delay_histogram = 0.f;
}
@@ -471,6 +525,10 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
int32_t valley_depth = 0;
assert(self != NULL);
+ if (self->farend->history_size != self->history_size) {
+ // Non matching history sizes.
+ return -1;
+ }
if (self->near_history_size > 1) {
// If we apply lookahead, shift near-end binary spectrum history. Insert
// current |binary_near_spectrum| and pull out the delayed one.
@@ -482,10 +540,10 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
// Compare with delayed spectra and store the |bit_counts| for each delay.
BitCountComparison(binary_near_spectrum, self->farend->binary_far_history,
- self->farend->history_size, self->bit_counts);
+ self->history_size, self->bit_counts);
// Update |mean_bit_counts|, which is the smoothed version of |bit_counts|.
- for (i = 0; i < self->farend->history_size; i++) {
+ for (i = 0; i < self->history_size; i++) {
// |bit_counts| is constrained to [0, 32], meaning we can smooth with a
// factor up to 2^26. We use Q9.
int32_t bit_count = (self->bit_counts[i] << 9); // Q9.
@@ -503,7 +561,7 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
// Find |candidate_delay|, |value_best_candidate| and |value_worst_candidate|
// of |mean_bit_counts|.
- for (i = 0; i < self->farend->history_size; i++) {
+ for (i = 0; i < self->history_size; i++) {
if (self->mean_bit_counts[i] < value_best_candidate) {
value_best_candidate = self->mean_bit_counts[i];
candidate_delay = i;
diff --git a/modules/audio_processing/utility/delay_estimator.h b/modules/audio_processing/utility/delay_estimator.h
index 3d5ffce2..65c3f034 100644
--- a/modules/audio_processing/utility/delay_estimator.h
+++ b/modules/audio_processing/utility/delay_estimator.h
@@ -36,6 +36,7 @@ typedef struct {
// Binary history variables.
uint32_t* binary_near_history;
int near_history_size;
+ int history_size;
// Delay estimation variables.
int32_t minimum_probability;
@@ -85,6 +86,19 @@ void WebRtc_FreeBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self);
BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend(
int history_size);
+// Re-allocates the buffers.
+//
+// Inputs:
+// - self : Pointer to the binary estimation far-end instance
+// which is the return value of
+// WebRtc_CreateBinaryDelayEstimatorFarend().
+// - history_size : Size of the far-end binary spectrum history.
+//
+// Return value:
+// - history_size : The history size allocated.
+int WebRtc_AllocateFarendBufferMemory(BinaryDelayEstimatorFarend* self,
+ int history_size);
+
// Initializes the delay estimation far-end instance created with
// WebRtc_CreateBinaryDelayEstimatorFarend(...).
//
@@ -141,6 +155,20 @@ void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self);
BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
BinaryDelayEstimatorFarend* farend, int max_lookahead);
+// Re-allocates |history_size| dependent buffers. The far-end buffers will be
+// updated at the same time if needed.
+//
+// Input:
+// - self : Pointer to the binary estimation instance which is
+// the return value of
+// WebRtc_CreateBinaryDelayEstimator().
+// - history_size : Size of the history buffers.
+//
+// Return value:
+// - history_size : The history size allocated.
+int WebRtc_AllocateHistoryBufferMemory(BinaryDelayEstimator* self,
+ int history_size);
+
// Initializes the delay estimation instance created with
// WebRtc_CreateBinaryDelayEstimator(...).
//
@@ -220,5 +248,4 @@ void WebRtc_MeanEstimatorFix(int32_t new_value,
int factor,
int32_t* mean_value);
-
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_
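
Example (not part of the patch): a sketch of how the two new (re)allocation entry points might be used together, based only on the declarations in this header; the sizes are illustrative and the include path is assumed.

#include "webrtc/modules/audio_processing/utility/delay_estimator.h"

static int GrowHistorySketch(void) {
  BinaryDelayEstimatorFarend* farend =
      WebRtc_CreateBinaryDelayEstimatorFarend(100);
  BinaryDelayEstimator* estimator =
      WebRtc_CreateBinaryDelayEstimator(farend, 10 /* max_lookahead */);
  // Re-allocate the |history_size| dependent buffers; the far-end buffers
  // are updated as well if needed. A return value of 0 signals that the
  // allocation failed.
  int allocated = WebRtc_AllocateHistoryBufferMemory(estimator, 200);
  WebRtc_InitBinaryDelayEstimatorFarend(farend);
  WebRtc_InitBinaryDelayEstimator(estimator);
  WebRtc_FreeBinaryDelayEstimator(estimator);
  WebRtc_FreeBinaryDelayEstimatorFarend(farend);
  return allocated;
}
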
diff --git a/modules/audio_processing/utility/delay_estimator_unittest.cc b/modules/audio_processing/utility/delay_estimator_unittest.cc
index ca0901d6..4ebe0e61 100644
--- a/modules/audio_processing/utility/delay_estimator_unittest.cc
+++ b/modules/audio_processing/utility/delay_estimator_unittest.cc
@@ -23,9 +23,13 @@ enum { kSpectrumSize = 65 };
// Delay history sizes.
enum { kMaxDelay = 100 };
enum { kLookahead = 10 };
+enum { kHistorySize = kMaxDelay + kLookahead };
// Length of binary spectrum sequence.
enum { kSequenceLength = 400 };
+const int kDifferentHistorySize = 3;
+const int kDifferentLookahead = 1;
+
const int kEnable[] = { 0, 1 };
const size_t kSizeEnable = sizeof(kEnable) / sizeof(*kEnable);
@@ -56,7 +60,7 @@ class DelayEstimatorTest : public ::testing::Test {
float near_f_[kSpectrumSize];
uint16_t far_u16_[kSpectrumSize];
uint16_t near_u16_[kSpectrumSize];
- uint32_t binary_spectrum_[kSequenceLength + kMaxDelay + kLookahead];
+ uint32_t binary_spectrum_[kSequenceLength + kHistorySize];
};
DelayEstimatorTest::DelayEstimatorTest()
@@ -76,21 +80,20 @@ DelayEstimatorTest::DelayEstimatorTest()
// |kSequenceLength| has to be long enough for the delay estimation to leave
// the initialized state.
binary_spectrum_[0] = 1;
- for (int i = 1; i < (kSequenceLength + kMaxDelay + kLookahead); i++) {
+ for (int i = 1; i < (kSequenceLength + kHistorySize); i++) {
binary_spectrum_[i] = 3 * binary_spectrum_[i - 1];
}
}
void DelayEstimatorTest::SetUp() {
farend_handle_ = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize,
- kMaxDelay + kLookahead);
+ kHistorySize);
ASSERT_TRUE(farend_handle_ != NULL);
farend_self_ = reinterpret_cast<DelayEstimatorFarend*>(farend_handle_);
handle_ = WebRtc_CreateDelayEstimator(farend_handle_, kLookahead);
ASSERT_TRUE(handle_ != NULL);
self_ = reinterpret_cast<DelayEstimator*>(handle_);
- binary_farend_ = WebRtc_CreateBinaryDelayEstimatorFarend(kMaxDelay +
- kLookahead);
+ binary_farend_ = WebRtc_CreateBinaryDelayEstimatorFarend(kHistorySize);
ASSERT_TRUE(binary_farend_ != NULL);
binary_ = WebRtc_CreateBinaryDelayEstimator(binary_farend_, kLookahead);
ASSERT_TRUE(binary_ != NULL);
@@ -226,7 +229,7 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
// Make sure we have a non-NULL value at start, so we can detect NULL after
// create failure.
void* handle = farend_handle_;
- handle = WebRtc_CreateDelayEstimatorFarend(33, kMaxDelay + kLookahead);
+ handle = WebRtc_CreateDelayEstimatorFarend(33, kHistorySize);
EXPECT_TRUE(handle == NULL);
handle = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize, 1);
EXPECT_TRUE(handle == NULL);
@@ -266,6 +269,28 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_,
spectrum_size_, 16));
+ // WebRtc_set_history_size() should return -1 if:
+ // 1) |handle| is a NULL.
+ // 2) |history_size| <= 1.
+ EXPECT_EQ(-1, WebRtc_set_history_size(NULL, 1));
+ EXPECT_EQ(-1, WebRtc_set_history_size(handle_, 1));
+ // WebRtc_history_size() should return -1 if:
+ // 1) NULL pointer input.
+ EXPECT_EQ(-1, WebRtc_history_size(NULL));
+ // 2) there is a mismatch between history size.
+ void* tmp_handle = WebRtc_CreateDelayEstimator(farend_handle_, kHistorySize);
+ EXPECT_EQ(0, WebRtc_InitDelayEstimator(tmp_handle));
+ EXPECT_EQ(kDifferentHistorySize,
+ WebRtc_set_history_size(tmp_handle, kDifferentHistorySize));
+ EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(tmp_handle));
+ EXPECT_EQ(kHistorySize, WebRtc_set_history_size(handle_, kHistorySize));
+ EXPECT_EQ(-1, WebRtc_history_size(tmp_handle));
+
+ // WebRtc_set_lookahead() should return -1 if we try a value outside the
+ // buffer.
+ EXPECT_EQ(-1, WebRtc_set_lookahead(handle_, kLookahead + 1));
+ EXPECT_EQ(-1, WebRtc_set_lookahead(handle_, -1));
+
// WebRtc_set_allowed_offset() should return -1 if we have:
// 1) NULL pointer as |handle|.
// 2) |allowed_offset| < 0.
@@ -289,6 +314,8 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
// 1) NULL pointer as |handle|.
// 2) NULL pointer as near-end spectrum.
// 3) Incorrect spectrum size.
+ // 4) Non-matching history sizes if multiple delay estimators are using the
+ // same far-end reference.
EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(NULL, near_f_,
spectrum_size_));
// Use |handle_| which is properly created at SetUp().
@@ -296,12 +323,18 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
spectrum_size_));
EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(handle_, near_f_,
spectrum_size_ + 1));
+ // |tmp_handle| is already in a non-matching state.
+ EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(tmp_handle,
+ near_f_,
+ spectrum_size_));
// WebRtc_DelayEstimatorProcessFix() should return -1 if we have:
// 1) NULL pointer as |handle|.
- // 3) NULL pointer as near-end spectrum.
- // 4) Incorrect spectrum size.
- // 6) Too high precision in near-end spectrum (Q-domain > 15).
+ // 2) NULL pointer as near-end spectrum.
+ // 3) Incorrect spectrum size.
+ // 4) Too high precision in near-end spectrum (Q-domain > 15).
+ // 5) Non-matching history sizes if multiple delay estimators use the same
+ // far-end reference.
EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(NULL, near_u16_, spectrum_size_,
0));
EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, NULL, spectrum_size_,
@@ -310,6 +343,12 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
spectrum_size_ + 1, 0));
EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, near_u16_,
spectrum_size_, 16));
+ // |tmp_handle| is already in a non-matching state.
+ EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(tmp_handle,
+ near_u16_,
+ spectrum_size_,
+ 0));
+ WebRtc_FreeDelayEstimator(tmp_handle);
// WebRtc_last_delay() should return -1 if we have a NULL pointer as |handle|.
EXPECT_EQ(-1, WebRtc_last_delay(NULL));
@@ -344,14 +383,23 @@ TEST_F(DelayEstimatorTest, VerifyEnableRobustValidation) {
TEST_F(DelayEstimatorTest, InitializedSpectrumAfterProcess) {
// In this test we verify that the mean spectra are initialized after first
- // time we call WebRtc_AddFarSpectrum() and Process() respectively.
+ // time we call WebRtc_AddFarSpectrum() and Process(), respectively. The test
+ // also verifies that all-zero spectra do not set the initialized state.
+ const float kZerosFloat[kSpectrumSize] = { 0.0 };
+ const uint16_t kZerosU16[kSpectrumSize] = { 0 };
// For floating point operations, process one frame and verify initialization
// flag.
Init();
+ EXPECT_EQ(0, WebRtc_AddFarSpectrumFloat(farend_handle_, kZerosFloat,
+ spectrum_size_));
+ EXPECT_EQ(0, farend_self_->far_spectrum_initialized);
EXPECT_EQ(0, WebRtc_AddFarSpectrumFloat(farend_handle_, far_f_,
spectrum_size_));
EXPECT_EQ(1, farend_self_->far_spectrum_initialized);
+ EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFloat(handle_, kZerosFloat,
+ spectrum_size_));
+ EXPECT_EQ(0, self_->near_spectrum_initialized);
EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFloat(handle_, near_f_,
spectrum_size_));
EXPECT_EQ(1, self_->near_spectrum_initialized);
@@ -359,9 +407,15 @@ TEST_F(DelayEstimatorTest, InitializedSpectrumAfterProcess) {
// For fixed point operations, process one frame and verify initialization
// flag.
Init();
+ EXPECT_EQ(0, WebRtc_AddFarSpectrumFix(farend_handle_, kZerosU16,
+ spectrum_size_, 0));
+ EXPECT_EQ(0, farend_self_->far_spectrum_initialized);
EXPECT_EQ(0, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_,
spectrum_size_, 0));
EXPECT_EQ(1, farend_self_->far_spectrum_initialized);
+ EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFix(handle_, kZerosU16,
+ spectrum_size_, 0));
+ EXPECT_EQ(0, self_->near_spectrum_initialized);
EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFix(handle_, near_u16_,
spectrum_size_, 0));
EXPECT_EQ(1, self_->near_spectrum_initialized);
@@ -536,6 +590,32 @@ TEST_F(DelayEstimatorTest, VerifyLookaheadAtCreate) {
WebRtc_FreeDelayEstimatorFarend(farend_handle);
}
+TEST_F(DelayEstimatorTest, VerifyLookaheadIsSetAndKeptAfterInit) {
+ EXPECT_EQ(kLookahead, WebRtc_lookahead(handle_));
+ EXPECT_EQ(kDifferentLookahead,
+ WebRtc_set_lookahead(handle_, kDifferentLookahead));
+ EXPECT_EQ(kDifferentLookahead, WebRtc_lookahead(handle_));
+ EXPECT_EQ(0, WebRtc_InitDelayEstimatorFarend(farend_handle_));
+ EXPECT_EQ(kDifferentLookahead, WebRtc_lookahead(handle_));
+ EXPECT_EQ(0, WebRtc_InitDelayEstimator(handle_));
+ EXPECT_EQ(kDifferentLookahead, WebRtc_lookahead(handle_));
+}
+
+TEST_F(DelayEstimatorTest, VerifyHistorySizeAtCreate) {
+ EXPECT_EQ(kHistorySize, WebRtc_history_size(handle_));
+}
+
+TEST_F(DelayEstimatorTest, VerifyHistorySizeIsSetAndKeptAfterInit) {
+ EXPECT_EQ(kHistorySize, WebRtc_history_size(handle_));
+ EXPECT_EQ(kDifferentHistorySize,
+ WebRtc_set_history_size(handle_, kDifferentHistorySize));
+ EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(handle_));
+ EXPECT_EQ(0, WebRtc_InitDelayEstimator(handle_));
+ EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(handle_));
+ EXPECT_EQ(0, WebRtc_InitDelayEstimatorFarend(farend_handle_));
+ EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(handle_));
+}
+
// TODO(bjornv): Add tests for SoftReset...(...).
} // namespace
diff --git a/modules/audio_processing/utility/delay_estimator_wrapper.c b/modules/audio_processing/utility/delay_estimator_wrapper.c
index 6ec894e6..270588f3 100644
--- a/modules/audio_processing/utility/delay_estimator_wrapper.c
+++ b/modules/audio_processing/utility/delay_estimator_wrapper.c
@@ -58,7 +58,7 @@ static void MeanEstimatorFloat(float new_value,
// Return:
// - out : Binary spectrum.
//
-static uint32_t BinarySpectrumFix(uint16_t* spectrum,
+static uint32_t BinarySpectrumFix(const uint16_t* spectrum,
SpectrumType* threshold_spectrum,
int q_domain,
int* threshold_initialized) {
@@ -93,7 +93,7 @@ static uint32_t BinarySpectrumFix(uint16_t* spectrum,
return out;
}
-static uint32_t BinarySpectrumFloat(float* spectrum,
+static uint32_t BinarySpectrumFloat(const float* spectrum,
SpectrumType* threshold_spectrum,
int* threshold_initialized) {
int i = kBandFirst;
@@ -147,7 +147,7 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size) {
COMPILE_ASSERT(kBandLast - kBandFirst < 32);
if (spectrum_size >= kBandLast) {
- self = malloc(sizeof(DelayEstimator));
+ self = malloc(sizeof(DelayEstimatorFarend));
}
if (self != NULL) {
@@ -197,8 +197,10 @@ void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift) {
WebRtc_SoftResetBinaryDelayEstimatorFarend(self->binary_farend, delay_shift);
}
-int WebRtc_AddFarSpectrumFix(void* handle, uint16_t* far_spectrum,
- int spectrum_size, int far_q) {
+int WebRtc_AddFarSpectrumFix(void* handle,
+ const uint16_t* far_spectrum,
+ int spectrum_size,
+ int far_q) {
DelayEstimatorFarend* self = (DelayEstimatorFarend*) handle;
uint32_t binary_spectrum = 0;
@@ -226,7 +228,8 @@ int WebRtc_AddFarSpectrumFix(void* handle, uint16_t* far_spectrum,
return 0;
}
-int WebRtc_AddFarSpectrumFloat(void* handle, float* far_spectrum,
+int WebRtc_AddFarSpectrumFloat(void* handle,
+ const float* far_spectrum,
int spectrum_size) {
DelayEstimatorFarend* self = (DelayEstimatorFarend*) handle;
uint32_t binary_spectrum = 0;
@@ -324,6 +327,29 @@ int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift) {
return WebRtc_SoftResetBinaryDelayEstimator(self->binary_handle, delay_shift);
}
+int WebRtc_set_history_size(void* handle, int history_size) {
+ DelayEstimator* self = handle;
+
+ if ((self == NULL) || (history_size <= 1)) {
+ return -1;
+ }
+ return WebRtc_AllocateHistoryBufferMemory(self->binary_handle, history_size);
+}
+
+int WebRtc_history_size(const void* handle) {
+ const DelayEstimator* self = handle;
+
+ if (self == NULL) {
+ return -1;
+ }
+ if (self->binary_handle->farend->history_size !=
+ self->binary_handle->history_size) {
+ // Non-matching history sizes.
+ return -1;
+ }
+ return self->binary_handle->history_size;
+}
+
int WebRtc_set_lookahead(void* handle, int lookahead) {
DelayEstimator* self = (DelayEstimator*) handle;
assert(self != NULL);
@@ -386,7 +412,7 @@ int WebRtc_is_robust_validation_enabled(const void* handle) {
}
int WebRtc_DelayEstimatorProcessFix(void* handle,
- uint16_t* near_spectrum,
+ const uint16_t* near_spectrum,
int spectrum_size,
int near_q) {
DelayEstimator* self = (DelayEstimator*) handle;
@@ -418,7 +444,7 @@ int WebRtc_DelayEstimatorProcessFix(void* handle,
}
int WebRtc_DelayEstimatorProcessFloat(void* handle,
- float* near_spectrum,
+ const float* near_spectrum,
int spectrum_size) {
DelayEstimator* self = (DelayEstimator*) handle;
uint32_t binary_spectrum = 0;
diff --git a/modules/audio_processing/utility/delay_estimator_wrapper.h b/modules/audio_processing/utility/delay_estimator_wrapper.h
index 13e86bdd..fdadebeb 100644
--- a/modules/audio_processing/utility/delay_estimator_wrapper.h
+++ b/modules/audio_processing/utility/delay_estimator_wrapper.h
@@ -17,9 +17,6 @@
#include "webrtc/typedefs.h"
// Releases the memory allocated by WebRtc_CreateDelayEstimatorFarend(...)
-// Input:
-// - handle : Pointer to the delay estimation far-end instance.
-//
void WebRtc_FreeDelayEstimatorFarend(void* handle);
// Allocates the memory needed by the far-end part of the delay estimation. The
@@ -27,36 +24,28 @@ void WebRtc_FreeDelayEstimatorFarend(void* handle);
// WebRtc_InitDelayEstimatorFarend(...).
//
// Inputs:
-// - spectrum_size : Size of the spectrum used both in far-end and
+// - spectrum_size : Size of the spectrum used both in far-end and
// near-end. Used to allocate memory for spectrum
// specific buffers.
-// - history_size : The far-end history buffer size. Note that the maximum
-// delay which can be estimated is controlled together
-// with |lookahead| through
-// WebRtc_CreateDelayEstimator().
+// - history_size : The far-end history buffer size. A change in buffer
+// size can be forced with WebRtc_set_history_size().
+// Note that the maximum delay which can be estimated is
+// determined together with WebRtc_set_lookahead().
//
// Return value:
-// - void* : Created |handle|. If the memory can't be allocated or
+// - void* : Created |handle|. If the memory can't be allocated or
// if any of the input parameters are invalid NULL is
// returned.
-//
void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size);
// Initializes the far-end part of the delay estimation instance returned by
// WebRtc_CreateDelayEstimatorFarend(...)
-// Input:
-// - handle : Pointer to the delay estimation far-end instance.
-//
-// Output:
-// - handle : Initialized instance.
-//
int WebRtc_InitDelayEstimatorFarend(void* handle);
// Soft resets the far-end part of the delay estimation instance returned by
// WebRtc_CreateDelayEstimatorFarend(...).
// Input:
// - delay_shift : The amount of blocks to shift history buffers.
-//
void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift);
// Adds the far-end spectrum to the far-end history buffer. This spectrum is
@@ -64,7 +53,6 @@ void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift);
// WebRtc_ProcessSpectrum().
//
// Inputs:
-// - handle : Pointer to the delay estimation far-end instance.
// - far_spectrum : Far-end spectrum.
// - spectrum_size : The size of the data arrays (same for both far- and
// near-end).
@@ -73,17 +61,17 @@ void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift);
// Output:
// - handle : Updated far-end instance.
//
-int WebRtc_AddFarSpectrumFix(void* handle, uint16_t* far_spectrum,
- int spectrum_size, int far_q);
+int WebRtc_AddFarSpectrumFix(void* handle,
+ const uint16_t* far_spectrum,
+ int spectrum_size,
+ int far_q);
// See WebRtc_AddFarSpectrumFix() for description.
-int WebRtc_AddFarSpectrumFloat(void* handle, float* far_spectrum,
+int WebRtc_AddFarSpectrumFloat(void* handle,
+ const float* far_spectrum,
int spectrum_size);
// Releases the memory allocated by WebRtc_CreateDelayEstimator(...)
-// Input:
-// - handle : Pointer to the delay estimation instance.
-//
void WebRtc_FreeDelayEstimator(void* handle);
// Allocates the memory needed by the delay estimation. The memory needs to be
@@ -117,24 +105,17 @@ void WebRtc_FreeDelayEstimator(void* handle);
//
// Note that the effective range of delay estimates is
// [-|lookahead|, ..., |history_size| - |lookahead|)
-// where |history_size| was set upon creating the far-end
-// history buffer size.
+// where |history_size| is set through
+// WebRtc_set_history_size().
//
// Return value:
// - void* : Created |handle|. If the memory can't be allocated or
// if any of the input parameters are invalid NULL is
// returned.
-//
void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead);
// Initializes the delay estimation instance returned by
// WebRtc_CreateDelayEstimator(...)
-// Input:
-// - handle : Pointer to the delay estimation instance.
-//
-// Output:
-// - handle : Initialized instance.
-//
int WebRtc_InitDelayEstimator(void* handle);
// Soft resets the delay estimation instance returned by
@@ -144,22 +125,44 @@ int WebRtc_InitDelayEstimator(void* handle);
//
// Return value:
// - actual_shifts : The actual number of shifts performed.
-//
int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift);
+// Sets the effective |history_size| used. Valid values are 2 and up; we need
+// at least two delays to compare in order to perform an estimate. If
+// |history_size| is changed, the buffers are reallocated and zero padded if
+// necessary. Note that changing |history_size| affects both the far-end and
+// near-end buffers. Hence, all DelayEstimators that share the same far-end
+// reference must be set to the same |history_size| value.
+// Inputs:
+// - handle : Pointer to the delay estimation instance.
+// - history_size : Effective history size to be used.
+// Return value:
+// - new_history_size : The new history size used. If the memory could not be
+// allocated, 0 is returned.
+int WebRtc_set_history_size(void* handle, int history_size);
+
+// Returns the history_size currently used.
+// Input:
+// - handle : Pointer to the delay estimation instance.
+int WebRtc_history_size(const void* handle);
+
// Sets the amount of |lookahead| to use. Valid values are [0, max_lookahead]
// where |max_lookahead| was set at create time through
// WebRtc_CreateDelayEstimator(...).
//
// Input:
-// - lookahead : The amount of blocks to shift history buffers.
+// - handle : Pointer to the delay estimation instance.
+// - lookahead : The amount of lookahead to be used.
//
// Return value:
-// - new_lookahead : The actual number of shifts performed.
-//
+// - new_lookahead : The actual amount of lookahead set, or an error value
+// if |handle| is a NULL pointer or |lookahead| is
+// invalid.
int WebRtc_set_lookahead(void* handle, int lookahead);
// Returns the amount of lookahead we currently use.
+// Input:
+// - handle : Pointer to the delay estimation instance.
int WebRtc_lookahead(void* handle);
// Sets the |allowed_offset| used in the robust validation scheme. If the
@@ -178,8 +181,6 @@ int WebRtc_set_allowed_offset(void* handle, int allowed_offset);
// Returns the |allowed_offset| in number of partitions.
int WebRtc_get_allowed_offset(const void* handle);
-// TODO(bjornv): Implement this functionality. Currently, enabling it has no
-// impact, hence this is an empty API.
// Enables/Disables a robust validation functionality in the delay estimation.
// This is by default set to disabled at create time. The state is preserved
// over a reset.
@@ -209,15 +210,14 @@ int WebRtc_is_robust_validation_enabled(const void* handle);
// - delay : >= 0 - Calculated delay value.
// -1 - Error.
// -2 - Insufficient data for estimation.
-//
int WebRtc_DelayEstimatorProcessFix(void* handle,
- uint16_t* near_spectrum,
+ const uint16_t* near_spectrum,
int spectrum_size,
int near_q);
// See WebRtc_DelayEstimatorProcessFix() for description.
int WebRtc_DelayEstimatorProcessFloat(void* handle,
- float* near_spectrum,
+ const float* near_spectrum,
int spectrum_size);
// Returns the last calculated delay updated by the function
@@ -230,7 +230,6 @@ int WebRtc_DelayEstimatorProcessFloat(void* handle,
// - delay : >= 0 - Last calculated delay value.
// -1 - Error.
// -2 - Insufficient data for estimation.
-//
int WebRtc_last_delay(void* handle);
// Returns the estimation quality/probability of the last calculated delay
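As a quick orientation for the reworked API above, here is a minimal usage sketch of the float path. The spectrum size, history size and lookahead values are made up for illustration, and the include path is assumed to follow the webrtc/ prefix used elsewhere in this change.

  #include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"

  // Estimate the delay between one far-end and one near-end block.
  int EstimateDelayOnce(const float* far_spectrum, const float* near_spectrum) {
    const int kSpectrumSize = 65;   // Assumed; must be >= kBandLast.
    const int kHistorySize = 100;   // Far-end history buffer, in blocks.
    const int kMaxLookahead = 10;

    void* farend = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize,
                                                     kHistorySize);
    void* estimator = WebRtc_CreateDelayEstimator(farend, kMaxLookahead);
    WebRtc_InitDelayEstimatorFarend(farend);
    WebRtc_InitDelayEstimator(estimator);

    // The new setters: both may be changed after creation and survive Init().
    WebRtc_set_history_size(estimator, kHistorySize);
    WebRtc_set_lookahead(estimator, kMaxLookahead / 2);

    // Per block: add the far-end spectrum, then process the near-end spectrum.
    WebRtc_AddFarSpectrumFloat(farend, far_spectrum, kSpectrumSize);
    int delay = WebRtc_DelayEstimatorProcessFloat(estimator, near_spectrum,
                                                  kSpectrumSize);
    // delay >= 0 on success, -1 on error, -2 if there is not enough data yet.

    WebRtc_FreeDelayEstimator(estimator);
    WebRtc_FreeDelayEstimatorFarend(farend);
    return delay;
  }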
diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn
index bb219e2b..be9658f8 100644
--- a/modules/desktop_capture/BUILD.gn
+++ b/modules/desktop_capture/BUILD.gn
@@ -37,10 +37,12 @@ source_set("desktop_capture") {
"mac/desktop_configuration.mm",
"mac/desktop_configuration_monitor.h",
"mac/desktop_configuration_monitor.cc",
- "mac/osx_version.h",
- "mac/osx_version.cc",
+ "mac/full_screen_chrome_window_detector.cc",
+ "mac/full_screen_chrome_window_detector.h",
"mac/scoped_pixel_buffer_object.cc",
"mac/scoped_pixel_buffer_object.h",
+ "mac/window_list_utils.cc",
+ "mac/window_list_utils.h",
"mouse_cursor.cc",
"mouse_cursor.h",
"mouse_cursor_monitor.h",
diff --git a/modules/desktop_capture/OWNERS b/modules/desktop_capture/OWNERS
index 4c0340d6..67d2fa19 100644
--- a/modules/desktop_capture/OWNERS
+++ b/modules/desktop_capture/OWNERS
@@ -1,4 +1,5 @@
alexeypa@chromium.org
+jiayl@webrtc.org
sergeyu@chromium.org
wez@chromium.org
diff --git a/modules/desktop_capture/desktop_capture.gypi b/modules/desktop_capture/desktop_capture.gypi
index 6f4a0830..a0195d66 100644
--- a/modules/desktop_capture/desktop_capture.gypi
+++ b/modules/desktop_capture/desktop_capture.gypi
@@ -38,10 +38,12 @@
"mac/desktop_configuration.mm",
"mac/desktop_configuration_monitor.h",
"mac/desktop_configuration_monitor.cc",
- "mac/osx_version.h",
- "mac/osx_version.cc",
+ "mac/full_screen_chrome_window_detector.cc",
+ "mac/full_screen_chrome_window_detector.h",
"mac/scoped_pixel_buffer_object.cc",
"mac/scoped_pixel_buffer_object.h",
+ "mac/window_list_utils.cc",
+ "mac/window_list_utils.h",
"mouse_cursor.cc",
"mouse_cursor.h",
"mouse_cursor_monitor.h",
diff --git a/modules/desktop_capture/desktop_capture_options.h b/modules/desktop_capture/desktop_capture_options.h
index c6aabd45..030cb2b7 100644
--- a/modules/desktop_capture/desktop_capture_options.h
+++ b/modules/desktop_capture/desktop_capture_options.h
@@ -19,6 +19,7 @@
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
#endif
namespace webrtc {
@@ -50,6 +51,14 @@ class DesktopCaptureOptions {
void set_configuration_monitor(scoped_refptr<DesktopConfigurationMonitor> m) {
configuration_monitor_ = m;
}
+
+ FullScreenChromeWindowDetector* full_screen_chrome_window_detector() const {
+ return full_screen_window_detector_;
+ }
+ void set_full_screen_chrome_window_detector(
+ scoped_refptr<FullScreenChromeWindowDetector> detector) {
+ full_screen_window_detector_ = detector;
+ }
#endif
// Flag indicating that the capturer should use screen change notifications.
@@ -82,6 +91,7 @@ class DesktopCaptureOptions {
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
+ scoped_refptr<FullScreenChromeWindowDetector> full_screen_window_detector_;
#endif
#if defined(WEBRTC_WIN)
diff --git a/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc b/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
new file mode 100644
index 00000000..23c432f6
--- /dev/null
+++ b/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
@@ -0,0 +1,244 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
+
+#include <assert.h>
+#include <libproc.h>
+#include <string>
+
+#include "webrtc/base/macutils.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+
+namespace webrtc {
+
+namespace {
+
+const int64_t kUpdateIntervalMs = 500;
+
+// Returns true if the window is minimized.
+bool IsWindowMinimized(CGWindowID id) {
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&id), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+ bool minimized = false;
+
+ if (window_array && CFArrayGetCount(window_array)) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFBooleanRef on_screen = reinterpret_cast<CFBooleanRef>(
+ CFDictionaryGetValue(window, kCGWindowIsOnscreen));
+
+ minimized = !on_screen;
+ }
+
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ return minimized;
+}
+
+// Returns true if the window is occupying a full screen.
+bool IsWindowFullScreen(const MacDesktopConfiguration& desktop_config,
+ CFDictionaryRef window) {
+ bool fullscreen = false;
+
+ CFDictionaryRef bounds_ref = reinterpret_cast<CFDictionaryRef>(
+ CFDictionaryGetValue(window, kCGWindowBounds));
+
+ CGRect bounds;
+ if (bounds_ref &&
+ CGRectMakeWithDictionaryRepresentation(bounds_ref, &bounds)) {
+ for (MacDisplayConfigurations::const_iterator it =
+ desktop_config.displays.begin();
+ it != desktop_config.displays.end(); ++it) {
+ if (it->bounds.equals(DesktopRect::MakeXYWH(bounds.origin.x,
+ bounds.origin.y,
+ bounds.size.width,
+ bounds.size.height))) {
+ fullscreen = true;
+ break;
+ }
+ }
+ }
+
+ return fullscreen;
+}
+
+std::string GetWindowTitle(CGWindowID id) {
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&id), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+ std::string title;
+
+ if (window_array && CFArrayGetCount(window_array)) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFStringRef title_ref = reinterpret_cast<CFStringRef>(
+ CFDictionaryGetValue(window, kCGWindowName));
+
+ if (title_ref)
+ rtc::ToUtf8(title_ref, &title);
+ }
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ return title;
+}
+
+int GetWindowOwnerPid(CGWindowID id) {
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&id), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+ int pid = 0;
+
+ if (window_array && CFArrayGetCount(window_array)) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFNumberRef pid_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowOwnerPID));
+
+ if (pid_ref)
+ CFNumberGetValue(pid_ref, kCFNumberIntType, &pid);
+ }
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ return pid;
+}
+
+// Returns the window that is full-screen and has the same title and owner pid
+// as the input window.
+CGWindowID FindFullScreenWindowWithSamePidAndTitle(CGWindowID id) {
+ int pid = GetWindowOwnerPid(id);
+ std::string title = GetWindowTitle(id);
+
+ // Only get on screen, non-desktop windows.
+ CFArrayRef window_array = CGWindowListCopyWindowInfo(
+ kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
+ kCGNullWindowID);
+ if (!window_array)
+ return kCGNullWindowID;
+
+ CGWindowID full_screen_window = kCGNullWindowID;
+
+ MacDesktopConfiguration desktop_config = MacDesktopConfiguration::GetCurrent(
+ MacDesktopConfiguration::TopLeftOrigin);
+
+ // Check windows to make sure they have an id, title, and owner pid.
+ CFIndex count = CFArrayGetCount(window_array);
+ for (CFIndex i = 0; i < count; ++i) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, i));
+ CFStringRef window_title_ref = reinterpret_cast<CFStringRef>(
+ CFDictionaryGetValue(window, kCGWindowName));
+ CFNumberRef window_id_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowNumber));
+ CFNumberRef window_pid_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowOwnerPID));
+
+ if (!window_title_ref || !window_id_ref || !window_pid_ref)
+ continue;
+
+ int window_pid = 0;
+ CFNumberGetValue(window_pid_ref, kCFNumberIntType, &window_pid);
+ if (window_pid != pid)
+ continue;
+
+ std::string window_title;
+ if (!rtc::ToUtf8(window_title_ref, &window_title) ||
+ window_title != title) {
+ continue;
+ }
+
+ CGWindowID window_id;
+ CFNumberGetValue(window_id_ref, kCFNumberIntType, &window_id);
+ if (IsWindowFullScreen(desktop_config, window)) {
+ full_screen_window = window_id;
+ break;
+ }
+ }
+
+ CFRelease(window_array);
+ return full_screen_window;
+}
+
+bool IsChromeWindow(CGWindowID id) {
+ int pid = GetWindowOwnerPid(id);
+ char buffer[PROC_PIDPATHINFO_MAXSIZE];
+ int path_length = proc_pidpath(pid, buffer, sizeof(buffer));
+ if (path_length <= 0)
+ return false;
+
+ const char* last_slash = strrchr(buffer, '/');
+ std::string name(last_slash ? last_slash + 1 : buffer);
+ return name.find("Google Chrome") == 0 || name == "Chromium";
+}
+
+} // namespace
+
+FullScreenChromeWindowDetector::FullScreenChromeWindowDetector()
+ : ref_count_(0) {}
+
+FullScreenChromeWindowDetector::~FullScreenChromeWindowDetector() {}
+
+CGWindowID FullScreenChromeWindowDetector::FindFullScreenWindow(
+ CGWindowID original_window) {
+ if (!IsChromeWindow(original_window) || !IsWindowMinimized(original_window))
+ return kCGNullWindowID;
+
+ CGWindowID full_screen_window_id =
+ FindFullScreenWindowWithSamePidAndTitle(original_window);
+
+ if (full_screen_window_id == kCGNullWindowID)
+ return kCGNullWindowID;
+
+ for (WindowCapturer::WindowList::iterator it = previous_window_list_.begin();
+ it != previous_window_list_.end(); ++it) {
+ if (static_cast<CGWindowID>(it->id) != full_screen_window_id)
+ continue;
+
+ int64_t time_interval =
+ (TickTime::Now() - last_udpate_time_).Milliseconds();
+ LOG(LS_WARNING) << "The full-screen window exists in the list, "
+ << "which was updated " << time_interval << "ms ago.";
+ return kCGNullWindowID;
+ }
+
+ return full_screen_window_id;
+}
+
+void FullScreenChromeWindowDetector::UpdateWindowListIfNeeded(
+ CGWindowID original_window) {
+ if (IsChromeWindow(original_window) &&
+ (TickTime::Now() - last_udpate_time_).Milliseconds()
+ > kUpdateIntervalMs) {
+ previous_window_list_.clear();
+ previous_window_list_.swap(current_window_list_);
+
+ // No need to update the window list when the window is minimized.
+ if (IsWindowMinimized(original_window)) {
+ previous_window_list_.clear();
+ return;
+ }
+
+ GetWindowList(&current_window_list_);
+ last_udpate_time_ = TickTime::Now();
+ }
+}
+
+} // namespace webrtc
diff --git a/modules/desktop_capture/mac/full_screen_chrome_window_detector.h b/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
new file mode 100644
index 00000000..b24fc997
--- /dev/null
+++ b/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_FULL_SCREEN_CHROME_WINDOW_DETECTOR_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_FULL_SCREEN_CHROME_WINDOW_DETECTOR_H_
+
+#include <ApplicationServices/ApplicationServices.h>
+
+#include "webrtc/modules/desktop_capture/window_capturer.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+// This is a workaround for the Chrome tab full-screen behavior: Chrome
+// creates a new window in full-screen mode to show a tab full-screen and
+// minimizes the old window. To continue capturing in this case, we try to
+// find the new full-screen window using these criteria:
+// 0. The original shared window is minimized.
+// 1. The original shared window's owner application name is "Google Chrome".
+// 2. The original window and the new window have the same title and owner
+// pid.
+// 3. The new window is full-screen.
+// 4. The new window did not exist at least 500 milliseconds ago.
+
+class FullScreenChromeWindowDetector {
+ public:
+ FullScreenChromeWindowDetector();
+
+ void AddRef() { ++ref_count_; }
+ void Release() {
+ if (--ref_count_ == 0)
+ delete this;
+ }
+
+ // Returns the full-screen window in place of the original window if all the
+ // criteria are met, or kCGNullWindowID if no such window found.
+ CGWindowID FindFullScreenWindow(CGWindowID original_window);
+
+ // The caller should call this function periodically, no less than twice per
+ // second.
+ void UpdateWindowListIfNeeded(CGWindowID original_window);
+
+ private:
+ ~FullScreenChromeWindowDetector();
+
+ Atomic32 ref_count_;
+
+ // We cache the last two results of the window list, so
+ // |previous_window_list_| is taken at least 500ms before the next Capture()
+ // call. If we only saved the last result, we might get a false positive
+ // (i.e. the full-screen window already exists in the list) if Capture() is
+ // called too soon.
+ WindowCapturer::WindowList current_window_list_;
+ WindowCapturer::WindowList previous_window_list_;
+ TickTime last_udpate_time_;
+
+ DISALLOW_COPY_AND_ASSIGN(FullScreenChromeWindowDetector);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_FULL_SCREEN_CHROME_WINDOW_DETECTOR_H_
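A minimal sketch of how a capturer is expected to drive this class, mirroring the pattern WindowCapturerMac uses further down in this change; the wrapper function itself is hypothetical.

  #include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"

  // Resolve the window to capture for this frame, then refresh the cache.
  CGWindowID ResolveWindowToCapture(
      webrtc::FullScreenChromeWindowDetector* detector,
      CGWindowID shared_window) {
    CGWindowID on_screen_window = shared_window;
    CGWindowID full_screen = detector->FindFullScreenWindow(shared_window);
    if (full_screen != kCGNullWindowID)
      on_screen_window = full_screen;  // Chrome went tab full-screen.

    // Must be called regularly; the detector rate-limits itself to 500 ms.
    detector->UpdateWindowListIfNeeded(shared_window);
    return on_screen_window;
  }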
diff --git a/modules/desktop_capture/mac/osx_version.cc b/modules/desktop_capture/mac/osx_version.cc
deleted file mode 100644
index 7466f203..00000000
--- a/modules/desktop_capture/mac/osx_version.cc
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <sys/utsname.h>
-
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-namespace {
-
-int GetDarwinVersion() {
- struct utsname uname_info;
- if (uname(&uname_info) != 0) {
- LOG(LS_ERROR) << "uname failed";
- return 0;
- }
-
- if (strcmp(uname_info.sysname, "Darwin") != 0)
- return 0;
-
- char* dot;
- int result = strtol(uname_info.release, &dot, 10);
- if (*dot != '.') {
- LOG(LS_ERROR) << "Failed to parse version";
- return 0;
- }
-
- return result;
-}
-
-} // namespace
-
-bool IsOSLionOrLater() {
- static int darwin_version = GetDarwinVersion();
-
- // Verify that the version has been parsed correctly.
- if (darwin_version < 6) {
- LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version;
- abort();
- }
-
- // Darwin major version 11 corresponds to OSX 10.7.
- return darwin_version >= 11;
-}
-
-} // namespace webrtc
diff --git a/modules/desktop_capture/mac/window_list_utils.cc b/modules/desktop_capture/mac/window_list_utils.cc
new file mode 100644
index 00000000..0c3eaa3a
--- /dev/null
+++ b/modules/desktop_capture/mac/window_list_utils.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
+
+#include <ApplicationServices/ApplicationServices.h>
+
+#include "webrtc/base/macutils.h"
+
+namespace webrtc {
+
+bool GetWindowList(WindowCapturer::WindowList* windows) {
+ // Only get on screen, non-desktop windows.
+ CFArrayRef window_array = CGWindowListCopyWindowInfo(
+ kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
+ kCGNullWindowID);
+ if (!window_array)
+ return false;
+
+ // Check windows to make sure they have an id, title and window layer, and
+ // only keep windows in layer 0 (normal windows).
+ CFIndex count = CFArrayGetCount(window_array);
+ for (CFIndex i = 0; i < count; ++i) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, i));
+ CFStringRef window_title = reinterpret_cast<CFStringRef>(
+ CFDictionaryGetValue(window, kCGWindowName));
+ CFNumberRef window_id = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowNumber));
+ CFNumberRef window_layer = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowLayer));
+ if (window_title && window_id && window_layer) {
+ // Skip windows that are not in layer 0 (e.g. the menu bar and the dock).
+ int layer;
+ CFNumberGetValue(window_layer, kCFNumberIntType, &layer);
+ if (layer != 0)
+ continue;
+
+ int id;
+ CFNumberGetValue(window_id, kCFNumberIntType, &id);
+ WindowCapturer::Window window;
+ window.id = id;
+ if (!rtc::ToUtf8(window_title, &(window.title)) ||
+ window.title.empty()) {
+ continue;
+ }
+ windows->push_back(window);
+ }
+ }
+
+ CFRelease(window_array);
+ return true;
+}
+
+} // namespace webrtc
diff --git a/modules/desktop_capture/mac/osx_version.h b/modules/desktop_capture/mac/window_list_utils.h
index 0ba49a4e..7be38506 100644
--- a/modules/desktop_capture/mac/osx_version.h
+++ b/modules/desktop_capture/mac/window_list_utils.h
@@ -8,9 +8,17 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_LIST_UTILS_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_LIST_UTILS_H_
+
+#include "webrtc/modules/desktop_capture/window_capturer.h"
+
namespace webrtc {
-// Returns true if the OS version >= OSX 10.7.
-bool IsOSLionOrLater();
+// A helper function to get the on-screen windows.
+bool GetWindowList(WindowCapturer::WindowList* windows);
} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_LIST_UTILS_H_
+
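For completeness, a small sketch of the new helper; the iteration style follows the rest of this change, and the printing is just for illustration.

  #include <stdio.h>

  #include "webrtc/modules/desktop_capture/mac/window_list_utils.h"

  // Print the id and title of every on-screen, non-desktop window.
  void PrintOnScreenWindows() {
    webrtc::WindowCapturer::WindowList windows;
    if (!webrtc::GetWindowList(&windows))
      return;
    for (webrtc::WindowCapturer::WindowList::iterator it = windows.begin();
         it != windows.end(); ++it) {
      printf("%ld: %s\n", static_cast<long>(it->id), it->title.c_str());
    }
  }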
diff --git a/modules/desktop_capture/mouse_cursor_monitor_mac.mm b/modules/desktop_capture/mouse_cursor_monitor_mac.mm
index e8806338..f33720d1 100644
--- a/modules/desktop_capture/mouse_cursor_monitor_mac.mm
+++ b/modules/desktop_capture/mouse_cursor_monitor_mac.mm
@@ -15,11 +15,12 @@
#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
+#include "webrtc/base/macutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
-#include "webrtc/modules/desktop_capture/mac/osx_version.h"
+#include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -52,6 +53,8 @@ class MouseCursorMonitorMac : public MouseCursorMonitor {
Callback* callback_;
Mode mode_;
scoped_ptr<MouseCursor> last_cursor_;
+ scoped_refptr<FullScreenChromeWindowDetector>
+ full_screen_chrome_window_detector_;
};
MouseCursorMonitorMac::MouseCursorMonitorMac(
@@ -62,9 +65,12 @@ MouseCursorMonitorMac::MouseCursorMonitorMac(
window_id_(window_id),
screen_id_(screen_id),
callback_(NULL),
- mode_(SHAPE_AND_POSITION) {
+ mode_(SHAPE_AND_POSITION),
+ full_screen_chrome_window_detector_(
+ options.full_screen_chrome_window_detector()) {
assert(window_id == kCGNullWindowID || screen_id == kInvalidScreenId);
- if (screen_id != kInvalidScreenId && !IsOSLionOrLater()) {
+ if (screen_id != kInvalidScreenId &&
+ rtc::GetOSVersionName() < rtc::kMacOSLion) {
// Single screen capture is not supported on pre OS X 10.7.
screen_id_ = kFullDesktopScreenId;
}
@@ -115,14 +121,23 @@ void MouseCursorMonitorMac::Capture() {
// if the current mouse position is covered by another window and also adjust
// |position| to make it relative to the window origin.
if (window_id_ != kCGNullWindowID) {
- // Get list of windows that may be covering parts of |window_id_|.
+ CGWindowID on_screen_window = window_id_;
+ if (full_screen_chrome_window_detector_) {
+ CGWindowID full_screen_window =
+ full_screen_chrome_window_detector_->FindFullScreenWindow(window_id_);
+
+ if (full_screen_window != kCGNullWindowID)
+ on_screen_window = full_screen_window;
+ }
+
+ // Get list of windows that may be covering parts of |on_screen_window|.
// CGWindowListCopyWindowInfo() returns windows in order from front to back,
- // so |window_id_| is expected to be the last in the list.
+ // so |on_screen_window| is expected to be the last in the list.
CFArrayRef window_array =
CGWindowListCopyWindowInfo(kCGWindowListOptionOnScreenOnly |
kCGWindowListOptionOnScreenAboveWindow |
kCGWindowListOptionIncludingWindow,
- window_id_);
+ on_screen_window);
bool found_window = false;
if (window_array) {
CFIndex count = CFArrayGetCount(window_array);
@@ -158,7 +173,7 @@ void MouseCursorMonitorMac::Capture() {
if (!CFNumberGetValue(window_number, kCFNumberIntType, &window_id))
continue;
- if (window_id == window_id_) {
+ if (window_id == on_screen_window) {
found_window = true;
if (!window_rect.Contains(position))
state = OUTSIDE;
diff --git a/modules/desktop_capture/screen_capturer_mac.mm b/modules/desktop_capture/screen_capturer_mac.mm
index 2d573390..be05bd99 100644
--- a/modules/desktop_capture/screen_capturer_mac.mm
+++ b/modules/desktop_capture/screen_capturer_mac.mm
@@ -20,13 +20,13 @@
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
+#include "webrtc/base/macutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
-#include "webrtc/modules/desktop_capture/mac/osx_version.h"
#include "webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h"
#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
@@ -425,7 +425,7 @@ void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
DesktopFrame* current_frame = queue_.current_frame();
bool flip = false; // GL capturers need flipping.
- if (IsOSLionOrLater()) {
+ if (rtc::GetOSVersionName() >= rtc::kMacOSLion) {
// Lion requires us to use their new APIs for doing screen capture. These
// APIS currently crash on 10.6.8 if there is no monitor attached.
if (!CgBlitPostLion(*current_frame, region)) {
@@ -478,7 +478,7 @@ void ScreenCapturerMac::SetMouseShapeObserver(
bool ScreenCapturerMac::GetScreenList(ScreenList* screens) {
assert(screens->size() == 0);
- if (!IsOSLionOrLater()) {
+ if (rtc::GetOSVersionName() < rtc::kMacOSLion) {
// Single monitor cast is not supported on pre OS X 10.7.
Screen screen;
screen.id = kFullDesktopScreenId;
@@ -496,7 +496,7 @@ bool ScreenCapturerMac::GetScreenList(ScreenList* screens) {
}
bool ScreenCapturerMac::SelectScreen(ScreenId id) {
- if (!IsOSLionOrLater()) {
+ if (rtc::GetOSVersionName() < rtc::kMacOSLion) {
// Ignore the screen selection on unsupported OS.
assert(!current_display_);
return id == kFullDesktopScreenId;
@@ -874,7 +874,7 @@ void ScreenCapturerMac::ScreenConfigurationChanged() {
// contents. Although the API exists in OS 10.6, it crashes the caller if
// the machine has no monitor connected, so we fall back to depcreated APIs
// when running on 10.6.
- if (IsOSLionOrLater()) {
+ if (rtc::GetOSVersionName() >= rtc::kMacOSLion) {
LOG(LS_INFO) << "Using CgBlitPostLion.";
// No need for any OpenGL support on Lion
return;
@@ -922,10 +922,11 @@ void ScreenCapturerMac::ScreenConfigurationChanged() {
LOG(LS_INFO) << "Using GlBlit";
CGLPixelFormatAttribute attributes[] = {
- // This function does an early return if IsOSLionOrLater(), this code only
- // runs on 10.6 and can be deleted once 10.6 support is dropped. So just
- // keep using kCGLPFAFullScreen even though it was deprecated in 10.6 --
- // it's still functional there, and it's not used on newer OS X versions.
+ // This function does an early return if GetOSVersionName() >= kMacOSLion,
+ // this code only runs on 10.6 and can be deleted once 10.6 support is
+ // dropped. So just keep using kCGLPFAFullScreen even though it was
+ // deprecated in 10.6 -- it's still functional there, and it's not used on
+ // newer OS X versions.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
kCGLPFAFullScreen,
diff --git a/modules/desktop_capture/window_capturer_mac.mm b/modules/desktop_capture/window_capturer_mac.mm
index d177fc40..f60be5d6 100644
--- a/modules/desktop_capture/window_capturer_mac.mm
+++ b/modules/desktop_capture/window_capturer_mac.mm
@@ -15,33 +15,21 @@
#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
+#include "webrtc/base/macutils.h"
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
+#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
namespace webrtc {
namespace {
-bool CFStringRefToUtf8(const CFStringRef string, std::string* str_utf8) {
- assert(string);
- assert(str_utf8);
- CFIndex length = CFStringGetLength(string);
- size_t max_length_utf8 =
- CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8);
- str_utf8->resize(max_length_utf8);
- CFIndex used_bytes;
- int result = CFStringGetBytes(
- string, CFRangeMake(0, length), kCFStringEncodingUTF8, 0, false,
- reinterpret_cast<UInt8*>(&*str_utf8->begin()), max_length_utf8,
- &used_bytes);
- if (result != length) {
- str_utf8->clear();
- return false;
- }
- str_utf8->resize(used_bytes);
- return true;
-}
-
+// Returns true if the window exists.
bool IsWindowValid(CGWindowID id) {
CFArrayRef window_id_array =
CFArrayCreate(NULL, reinterpret_cast<const void **>(&id), 1, NULL);
@@ -56,7 +44,9 @@ bool IsWindowValid(CGWindowID id) {
class WindowCapturerMac : public WindowCapturer {
public:
- WindowCapturerMac();
+ explicit WindowCapturerMac(
+ scoped_refptr<FullScreenChromeWindowDetector>
+ full_screen_chrome_window_detector);
virtual ~WindowCapturerMac();
// WindowCapturer interface.
@@ -70,14 +60,22 @@ class WindowCapturerMac : public WindowCapturer {
private:
Callback* callback_;
+
+ // The window being captured.
CGWindowID window_id_;
+ scoped_refptr<FullScreenChromeWindowDetector>
+ full_screen_chrome_window_detector_;
+
DISALLOW_COPY_AND_ASSIGN(WindowCapturerMac);
};
-WindowCapturerMac::WindowCapturerMac()
+WindowCapturerMac::WindowCapturerMac(
+ scoped_refptr<FullScreenChromeWindowDetector>
+ full_screen_chrome_window_detector)
: callback_(NULL),
- window_id_(0) {
+ window_id_(0),
+ full_screen_chrome_window_detector_(full_screen_chrome_window_detector) {
}
WindowCapturerMac::~WindowCapturerMac() {
@@ -114,7 +112,7 @@ bool WindowCapturerMac::GetWindowList(WindowList* windows) {
CFNumberGetValue(window_id, kCFNumberIntType, &id);
WindowCapturer::Window window;
window.id = id;
- if (!CFStringRefToUtf8(window_title, &(window.title)) ||
+ if (!rtc::ToUtf8(window_title, &(window.title)) ||
window.title.empty()) {
continue;
}
@@ -183,9 +181,18 @@ void WindowCapturerMac::Capture(const DesktopRegion& region) {
return;
}
+ CGWindowID on_screen_window = window_id_;
+ if (full_screen_chrome_window_detector_) {
+ CGWindowID full_screen_window =
+ full_screen_chrome_window_detector_->FindFullScreenWindow(window_id_);
+
+ if (full_screen_window != kCGNullWindowID)
+ on_screen_window = full_screen_window;
+ }
+
CGImageRef window_image = CGWindowListCreateImage(
CGRectNull, kCGWindowListOptionIncludingWindow,
- window_id_, kCGWindowImageBoundsIgnoreFraming);
+ on_screen_window, kCGWindowImageBoundsIgnoreFraming);
if (!window_image) {
callback_->OnCaptureCompleted(NULL);
@@ -218,13 +225,16 @@ void WindowCapturerMac::Capture(const DesktopRegion& region) {
CFRelease(window_image);
callback_->OnCaptureCompleted(frame);
+
+ if (full_screen_chrome_window_detector_)
+ full_screen_chrome_window_detector_->UpdateWindowListIfNeeded(window_id_);
}
} // namespace
// static
WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) {
- return new WindowCapturerMac();
+ return new WindowCapturerMac(options.full_screen_chrome_window_detector());
}
} // namespace webrtc
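To actually get the detector into the capturer, it now has to be plumbed through DesktopCaptureOptions. A sketch follows; the options object is assumed to be set up by the caller, and the wrapper function is hypothetical.

  #include "webrtc/modules/desktop_capture/desktop_capture_options.h"
  #include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
  #include "webrtc/modules/desktop_capture/window_capturer.h"
  #include "webrtc/system_wrappers/interface/scoped_refptr.h"

  // Create a window capturer that can follow Chrome tab full-screen switches.
  webrtc::WindowCapturer* CreateWindowCapturerWithDetector(
      webrtc::DesktopCaptureOptions* options) {
    webrtc::scoped_refptr<webrtc::FullScreenChromeWindowDetector> detector(
        new webrtc::FullScreenChromeWindowDetector());
    // One ref-counted detector instance can be shared by several consumers,
    // e.g. the window capturer and the mouse cursor monitor.
    options->set_full_screen_chrome_window_detector(detector);
    return webrtc::WindowCapturer::Create(*options);
  }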
diff --git a/modules/interface/module_common_types.h b/modules/interface/module_common_types.h
index 2c947071..e37313c9 100644
--- a/modules/interface/module_common_types.h
+++ b/modules/interface/module_common_types.h
@@ -75,14 +75,22 @@ struct RTPVideoHeaderVP8 {
bool beginningOfPartition; // True if this packet is the first
// in a VP8 partition. Otherwise false
};
+
+struct RTPVideoHeaderH264 {
+ uint8_t nalu_header;
+ bool single_nalu;
+};
+
union RTPVideoTypeHeader {
RTPVideoHeaderVP8 VP8;
+ RTPVideoHeaderH264 H264;
};
enum RtpVideoCodecTypes {
kRtpVideoNone,
kRtpVideoGeneric,
- kRtpVideoVp8
+ kRtpVideoVp8,
+ kRtpVideoH264
};
struct RTPVideoHeader {
uint16_t width; // size
diff --git a/modules/pacing/include/mock/mock_paced_sender.h b/modules/pacing/include/mock/mock_paced_sender.h
index 3841ef37..6600a929 100644
--- a/modules/pacing/include/mock/mock_paced_sender.h
+++ b/modules/pacing/include/mock/mock_paced_sender.h
@@ -16,12 +16,13 @@
#include <vector>
#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/system_wrappers/interface/clock.h"
namespace webrtc {
class MockPacedSender : public PacedSender {
public:
- MockPacedSender() : PacedSender(NULL, 0, 0) {}
+ MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), NULL, 0, 0) {}
MOCK_METHOD6(SendPacket, bool(Priority priority,
uint32_t ssrc,
uint16_t sequence_number,
diff --git a/modules/pacing/include/paced_sender.h b/modules/pacing/include/paced_sender.h
index 95f1a86e..55497db3 100644
--- a/modules/pacing/include/paced_sender.h
+++ b/modules/pacing/include/paced_sender.h
@@ -16,11 +16,14 @@
#include "webrtc/modules/interface/module.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+class Clock;
class CriticalSectionWrapper;
+
namespace paced_sender {
class IntervalBudget;
struct Packet;
@@ -48,6 +51,7 @@ class PacedSender : public Module {
int64_t capture_time_ms,
bool retransmission) = 0;
// Called when it's a good time to send padding data.
+ // Returns the number of bytes sent.
virtual int TimeToSendPadding(int bytes) = 0;
protected:
@@ -55,8 +59,17 @@ class PacedSender : public Module {
};
static const int kDefaultMaxQueueLengthMs = 2000;
-
- PacedSender(Callback* callback, int max_bitrate_kbps, int min_bitrate_kbps);
+ // Pace in kbits/s until we receive the first estimate.
+ static const int kDefaultInitialPaceKbps = 2000;
+ // Pacing-rate relative to our target send rate.
+ // Multiplicative factor that is applied to the target bitrate to calculate
+ // the number of bytes that can be transmitted per interval.
+ // Increasing this factor will result in lower delays in cases of bitrate
+ // overshoots from the encoder.
+ static const float kDefaultPaceMultiplier;
+
+ PacedSender(Clock* clock, Callback* callback, int max_bitrate_kbps,
+ int min_bitrate_kbps);
virtual ~PacedSender();
@@ -101,40 +114,50 @@ class PacedSender : public Module {
private:
// Return true if next packet in line should be transmitted.
// Return packet list that contains the next packet.
- bool ShouldSendNextPacket(paced_sender::PacketList** packet_list);
+ bool ShouldSendNextPacket(paced_sender::PacketList** packet_list)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
// Local helper function to GetNextPacket.
- paced_sender::Packet GetNextPacketFromList(paced_sender::PacketList* packets);
+ paced_sender::Packet GetNextPacketFromList(paced_sender::PacketList* packets)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
- bool SendPacketFromList(paced_sender::PacketList* packet_list);
+ bool SendPacketFromList(paced_sender::PacketList* packet_list)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
// Updates the number of bytes that can be sent for the next time interval.
- void UpdateBytesPerInterval(uint32_t delta_time_in_ms);
+ void UpdateBytesPerInterval(uint32_t delta_time_in_ms)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
// Updates the buffers with the number of bytes that we sent.
- void UpdateMediaBytesSent(int num_bytes);
+ void UpdateMediaBytesSent(int num_bytes) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+
+ Clock* const clock_;
+ Callback* const callback_;
- Callback* callback_;
- bool enabled_;
- bool paused_;
- int max_queue_length_ms_;
scoped_ptr<CriticalSectionWrapper> critsect_;
+ bool enabled_ GUARDED_BY(critsect_);
+ bool paused_ GUARDED_BY(critsect_);
+ int max_queue_length_ms_ GUARDED_BY(critsect_);
// This is the media budget, keeping track of how many bits of media
// we can pace out during the current interval.
- scoped_ptr<paced_sender::IntervalBudget> media_budget_;
+ scoped_ptr<paced_sender::IntervalBudget> media_budget_ GUARDED_BY(critsect_);
// This is the padding budget, keeping track of how many bits of padding we're
// allowed to send out during the current interval. This budget will be
// utilized when there's no media to send.
- scoped_ptr<paced_sender::IntervalBudget> padding_budget_;
-
- TickTime time_last_update_;
- TickTime time_last_send_;
- int64_t capture_time_ms_last_queued_;
- int64_t capture_time_ms_last_sent_;
-
- scoped_ptr<paced_sender::PacketList> high_priority_packets_;
- scoped_ptr<paced_sender::PacketList> normal_priority_packets_;
- scoped_ptr<paced_sender::PacketList> low_priority_packets_;
+ scoped_ptr<paced_sender::IntervalBudget> padding_budget_
+ GUARDED_BY(critsect_);
+
+ TickTime time_last_update_ GUARDED_BY(critsect_);
+ TickTime time_last_send_ GUARDED_BY(critsect_);
+ int64_t capture_time_ms_last_queued_ GUARDED_BY(critsect_);
+ int64_t capture_time_ms_last_sent_ GUARDED_BY(critsect_);
+
+ scoped_ptr<paced_sender::PacketList> high_priority_packets_
+ GUARDED_BY(critsect_);
+ scoped_ptr<paced_sender::PacketList> normal_priority_packets_
+ GUARDED_BY(critsect_);
+ scoped_ptr<paced_sender::PacketList> low_priority_packets_
+ GUARDED_BY(critsect_);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_PACED_SENDER_H_
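For reference, constructing the pacer with the new signature looks like the sketch below; the callback implementation and the 500 kbps target bitrate are assumptions, while the clock injection and multiplier usage follow the unit test further down.

  #include "webrtc/modules/pacing/include/paced_sender.h"
  #include "webrtc/system_wrappers/interface/clock.h"

  // Create a pacer driven by an injected Clock instead of TickTime.
  webrtc::PacedSender* CreatePacer(webrtc::PacedSender::Callback* callback) {
    const int kTargetBitrateKbps = 500;  // Assumed target send rate.
    // Max pacing rate is the target bitrate scaled by the pace multiplier;
    // min_bitrate_kbps = 0 means no padding budget.
    return new webrtc::PacedSender(
        webrtc::Clock::GetRealTimeClock(),
        callback,
        static_cast<int>(webrtc::PacedSender::kDefaultPaceMultiplier *
                         kTargetBitrateKbps),
        0);
  }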
diff --git a/modules/pacing/paced_sender.cc b/modules/pacing/paced_sender.cc
index e9f9bddc..323cafec 100644
--- a/modules/pacing/paced_sender.cc
+++ b/modules/pacing/paced_sender.cc
@@ -12,7 +12,11 @@
#include <assert.h>
+#include <map>
+#include <set>
+
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
@@ -35,21 +39,24 @@ namespace webrtc {
namespace paced_sender {
struct Packet {
- Packet(uint32_t ssrc, uint16_t seq_number, int64_t capture_time_ms,
- int64_t enqueue_time_ms, int length_in_bytes, bool retransmission)
- : ssrc_(ssrc),
- sequence_number_(seq_number),
- capture_time_ms_(capture_time_ms),
- enqueue_time_ms_(enqueue_time_ms),
- bytes_(length_in_bytes),
- retransmission_(retransmission) {
- }
- uint32_t ssrc_;
- uint16_t sequence_number_;
- int64_t capture_time_ms_;
- int64_t enqueue_time_ms_;
- int bytes_;
- bool retransmission_;
+ Packet(uint32_t ssrc,
+ uint16_t seq_number,
+ int64_t capture_time_ms,
+ int64_t enqueue_time_ms,
+ int length_in_bytes,
+ bool retransmission)
+ : ssrc(ssrc),
+ sequence_number(seq_number),
+ capture_time_ms(capture_time_ms),
+ enqueue_time_ms(enqueue_time_ms),
+ bytes(length_in_bytes),
+ retransmission(retransmission) {}
+ uint32_t ssrc;
+ uint16_t sequence_number;
+ int64_t capture_time_ms;
+ int64_t enqueue_time_ms;
+ int bytes;
+ bool retransmission;
};
// STL list style class which prevents duplicates in the list.
@@ -67,23 +74,24 @@ class PacketList {
void pop_front() {
Packet& packet = packet_list_.front();
- uint16_t sequence_number = packet.sequence_number_;
+ uint16_t sequence_number = packet.sequence_number;
+ uint32_t ssrc = packet.ssrc;
packet_list_.pop_front();
- sequence_number_set_.erase(sequence_number);
+ sequence_number_set_[ssrc].erase(sequence_number);
}
void push_back(const Packet& packet) {
- if (sequence_number_set_.find(packet.sequence_number_) ==
- sequence_number_set_.end()) {
+ if (sequence_number_set_[packet.ssrc].find(packet.sequence_number) ==
+ sequence_number_set_[packet.ssrc].end()) {
// Don't insert duplicates.
packet_list_.push_back(packet);
- sequence_number_set_.insert(packet.sequence_number_);
+ sequence_number_set_[packet.ssrc].insert(packet.sequence_number);
}
}
private:
std::list<Packet> packet_list_;
- std::set<uint16_t> sequence_number_set_;
+ std::map<uint32_t, std::set<uint16_t> > sequence_number_set_;
};
class IntervalBudget {
@@ -120,14 +128,18 @@ class IntervalBudget {
};
} // namespace paced_sender
-PacedSender::PacedSender(Callback* callback,
+const float PacedSender::kDefaultPaceMultiplier = 2.5f;
+
+PacedSender::PacedSender(Clock* clock,
+ Callback* callback,
int max_bitrate_kbps,
int min_bitrate_kbps)
- : callback_(callback),
+ : clock_(clock),
+ callback_(callback),
+ critsect_(CriticalSectionWrapper::CreateCriticalSection()),
enabled_(true),
paused_(false),
max_queue_length_ms_(kDefaultMaxQueueLengthMs),
- critsect_(CriticalSectionWrapper::CreateCriticalSection()),
media_budget_(new paced_sender::IntervalBudget(max_bitrate_kbps)),
padding_budget_(new paced_sender::IntervalBudget(min_bitrate_kbps)),
time_last_update_(TickTime::Now()),
@@ -178,7 +190,7 @@ bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
return true; // We can send now.
}
if (capture_time_ms < 0) {
- capture_time_ms = TickTime::MillisecondTimestamp();
+ capture_time_ms = clock_->TimeInMilliseconds();
}
if (priority != kHighPriority &&
capture_time_ms > capture_time_ms_last_queued_) {
@@ -201,7 +213,7 @@ bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
packet_list->push_back(paced_sender::Packet(ssrc,
sequence_number,
capture_time_ms,
- TickTime::MillisecondTimestamp(),
+ clock_->TimeInMilliseconds(),
bytes,
retransmission));
return false;
@@ -214,22 +226,22 @@ void PacedSender::set_max_queue_length_ms(int max_queue_length_ms) {
int PacedSender::QueueInMs() const {
CriticalSectionScoped cs(critsect_.get());
- int64_t now_ms = TickTime::MillisecondTimestamp();
+ int64_t now_ms = clock_->TimeInMilliseconds();
int64_t oldest_packet_enqueue_time = now_ms;
if (!high_priority_packets_->empty()) {
- oldest_packet_enqueue_time = std::min(
- oldest_packet_enqueue_time,
- high_priority_packets_->front().enqueue_time_ms_);
+ oldest_packet_enqueue_time =
+ std::min(oldest_packet_enqueue_time,
+ high_priority_packets_->front().enqueue_time_ms);
}
if (!normal_priority_packets_->empty()) {
- oldest_packet_enqueue_time = std::min(
- oldest_packet_enqueue_time,
- normal_priority_packets_->front().enqueue_time_ms_);
+ oldest_packet_enqueue_time =
+ std::min(oldest_packet_enqueue_time,
+ normal_priority_packets_->front().enqueue_time_ms);
}
if (!low_priority_packets_->empty()) {
- oldest_packet_enqueue_time = std::min(
- oldest_packet_enqueue_time,
- low_priority_packets_->front().enqueue_time_ms_);
+ oldest_packet_enqueue_time =
+ std::min(oldest_packet_enqueue_time,
+ low_priority_packets_->front().enqueue_time_ms);
}
return now_ms - oldest_packet_enqueue_time;
}
@@ -286,10 +298,10 @@ bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list)
paced_sender::Packet packet = GetNextPacketFromList(packet_list);
critsect_->Leave();
- const bool success = callback_->TimeToSendPacket(packet.ssrc_,
- packet.sequence_number_,
- packet.capture_time_ms_,
- packet.retransmission_);
+ const bool success = callback_->TimeToSendPacket(packet.ssrc,
+ packet.sequence_number,
+ packet.capture_time_ms,
+ packet.retransmission);
critsect_->Enter();
// If packet cannot be sent then keep it in packet list and exit early.
// There's no need to send more packets.
@@ -297,15 +309,15 @@ bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list)
return false;
}
packet_list->pop_front();
- const bool last_packet = packet_list->empty() ||
- packet_list->front().capture_time_ms_ > packet.capture_time_ms_;
+ const bool last_packet =
+ packet_list->empty() ||
+ packet_list->front().capture_time_ms > packet.capture_time_ms;
if (packet_list != high_priority_packets_.get()) {
- if (packet.capture_time_ms_ > capture_time_ms_last_sent_) {
- capture_time_ms_last_sent_ = packet.capture_time_ms_;
- } else if (packet.capture_time_ms_ == capture_time_ms_last_sent_ &&
+ if (packet.capture_time_ms > capture_time_ms_last_sent_) {
+ capture_time_ms_last_sent_ = packet.capture_time_ms;
+ } else if (packet.capture_time_ms == capture_time_ms_last_sent_ &&
last_packet) {
- TRACE_EVENT_ASYNC_END0("webrtc_rtp", "PacedSend",
- packet.capture_time_ms_);
+ TRACE_EVENT_ASYNC_END0("webrtc_rtp", "PacedSend", packet.capture_time_ms);
}
}
return true;
@@ -339,12 +351,13 @@ bool PacedSender::ShouldSendNextPacket(paced_sender::PacketList** packet_list) {
int64_t high_priority_capture_time = -1;
if (!high_priority_packets_->empty()) {
high_priority_capture_time =
- high_priority_packets_->front().capture_time_ms_;
+ high_priority_packets_->front().capture_time_ms;
*packet_list = high_priority_packets_.get();
}
if (!normal_priority_packets_->empty() &&
- (high_priority_capture_time == -1 || high_priority_capture_time >
- normal_priority_packets_->front().capture_time_ms_)) {
+ (high_priority_capture_time == -1 ||
+ high_priority_capture_time >
+ normal_priority_packets_->front().capture_time_ms)) {
*packet_list = normal_priority_packets_.get();
}
if (*packet_list)
@@ -370,7 +383,7 @@ bool PacedSender::ShouldSendNextPacket(paced_sender::PacketList** packet_list) {
paced_sender::Packet PacedSender::GetNextPacketFromList(
paced_sender::PacketList* packets) {
paced_sender::Packet packet = packets->front();
- UpdateMediaBytesSent(packet.bytes_);
+ UpdateMediaBytesSent(packet.bytes);
return packet;
}
diff --git a/modules/pacing/paced_sender_unittest.cc b/modules/pacing/paced_sender_unittest.cc
index 9763c800..55188558 100644
--- a/modules/pacing/paced_sender_unittest.cc
+++ b/modules/pacing/paced_sender_unittest.cc
@@ -12,6 +12,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/system_wrappers/interface/clock.h"
using testing::_;
using testing::Return;
@@ -55,12 +56,13 @@ class PacedSenderPadding : public PacedSender::Callback {
class PacedSenderTest : public ::testing::Test {
protected:
- PacedSenderTest() {
+ PacedSenderTest() : clock_(123456) {
srand(0);
TickTime::UseFakeClock(123456);
// Need to initialize PacedSender after we initialize clock.
send_bucket_.reset(
- new PacedSender(&callback_, kPaceMultiplier * kTargetBitrate, 0));
+ new PacedSender(
+ &clock_, &callback_, kPaceMultiplier * kTargetBitrate, 0));
}
void SendAndExpectPacket(PacedSender::Priority priority,
@@ -75,6 +77,7 @@ class PacedSenderTest : public ::testing::Test {
.WillRepeatedly(Return(true));
}
+ SimulatedClock clock_;
MockPacedSenderCallback callback_;
scoped_ptr<PacedSender> send_bucket_;
};
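From here on, every time step in the tests advances both the injected SimulatedClock and the legacy TickTime fake clock by the same amount, so the clock-injected PacedSender and the remaining TickTime-based code observe the same time. A hypothetical helper capturing that repeated pair (not part of the patch; the tick_util.h include path is an assumption based on the includes used elsewhere in the tree):

  #include "webrtc/system_wrappers/interface/clock.h"
  #include "webrtc/system_wrappers/interface/tick_util.h"

  // Advances the simulated clock and the fake TickTime clock in lockstep,
  // mirroring the clock_.AdvanceTimeMilliseconds(n) /
  // TickTime::AdvanceFakeClock(n) pairs used throughout these tests.
  void AdvanceBothClocks(webrtc::SimulatedClock* clock, int64_t ms) {
    clock->AdvanceTimeMilliseconds(ms);
    webrtc::TickTime::AdvanceFakeClock(ms);
  }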
@@ -84,19 +87,21 @@ TEST_F(PacedSenderTest, QueuePacket) {
uint16_t sequence_number = 1234;
// Due to the multiplicative factor we can send 3 packets not 2 packets.
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
- int64_t queued_packet_timestamp = TickTime::MillisecondTimestamp();
+ clock_.TimeInMilliseconds(), 250, false);
+ int64_t queued_packet_timestamp = clock_.TimeInMilliseconds();
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, queued_packet_timestamp, 250, false));
send_bucket_->Process();
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
+ clock_.AdvanceTimeMilliseconds(4);
TickTime::AdvanceFakeClock(4);
EXPECT_EQ(1, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(1);
TickTime::AdvanceFakeClock(1);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_CALL(callback_, TimeToSendPacket(
@@ -106,11 +111,11 @@ TEST_F(PacedSenderTest, QueuePacket) {
send_bucket_->Process();
sequence_number++;
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, TickTime::MillisecondTimestamp(), 250, false));
+ sequence_number++, clock_.TimeInMilliseconds(), 250, false));
send_bucket_->Process();
}
@@ -121,16 +126,17 @@ TEST_F(PacedSenderTest, PaceQueuedPackets) {
// Due to the multiplicative factor we can send 3 packets not 2 packets.
for (int i = 0; i < 3; ++i) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
}
for (int j = 0; j < 30; ++j) {
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, TickTime::MillisecondTimestamp(), 250, false));
+ sequence_number++, clock_.TimeInMilliseconds(), 250, false));
}
send_bucket_->Process();
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
for (int k = 0; k < 10; ++k) {
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_CALL(callback_,
TimeToSendPacket(ssrc, _, _, false))
@@ -140,17 +146,18 @@ TEST_F(PacedSenderTest, PaceQueuedPackets) {
EXPECT_EQ(0, send_bucket_->Process());
}
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number, TickTime::MillisecondTimestamp(), 250, false));
+ sequence_number, clock_.TimeInMilliseconds(), 250, false));
send_bucket_->Process();
}
@@ -162,21 +169,22 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
// Due to the multiplicative factor we can send 3 packets not 2 packets.
for (int i = 0; i < 3; ++i) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
}
queued_sequence_number = sequence_number;
for (int j = 0; j < 30; ++j) {
// Send in duplicate packets.
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number, TickTime::MillisecondTimestamp(), 250, false));
+ sequence_number, clock_.TimeInMilliseconds(), 250, false));
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, TickTime::MillisecondTimestamp(), 250, false));
+ sequence_number++, clock_.TimeInMilliseconds(), 250, false));
}
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
send_bucket_->Process();
for (int k = 0; k < 10; ++k) {
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
for (int i = 0; i < 3; ++i) {
@@ -190,17 +198,42 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
EXPECT_EQ(0, send_bucket_->Process());
}
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, TickTime::MillisecondTimestamp(), 250, false));
+ sequence_number++, clock_.TimeInMilliseconds(), 250, false));
+ send_bucket_->Process();
+}
+
+TEST_F(PacedSenderTest, CanQueuePacketsWithSameSequenceNumberOnDifferentSsrcs) {
+ uint32_t ssrc = 12345;
+ uint16_t sequence_number = 1234;
+
+ SendAndExpectPacket(PacedSender::kNormalPriority,
+ ssrc,
+ sequence_number,
+ clock_.TimeInMilliseconds(),
+ 250,
+ false);
+
+ // Expect packet on second ssrc to be queued and sent as well.
+ SendAndExpectPacket(PacedSender::kNormalPriority,
+ ssrc + 1,
+ sequence_number,
+ clock_.TimeInMilliseconds(),
+ 250,
+ false);
+
+ clock_.AdvanceTimeMilliseconds(1000);
+ TickTime::AdvanceFakeClock(1000);
send_bucket_->Process();
}
@@ -211,14 +244,15 @@ TEST_F(PacedSenderTest, Padding) {
send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
// Due to the multiplicative factor we can send 3 packets not 2 packets.
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- TickTime::MillisecondTimestamp(), 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
// No padding is expected since we have sent too much already.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -227,6 +261,7 @@ TEST_F(PacedSenderTest, Padding) {
EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(1).
WillOnce(Return(250));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -238,11 +273,13 @@ TEST_F(PacedSenderTest, NoPaddingWhenDisabled) {
// No padding is expected since the pacer is disabled.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -255,10 +292,11 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
const int kTimeStep = 5;
const int64_t kBitrateWindow = 100;
send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
- int64_t start_time = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
+ int64_t start_time = clock_.TimeInMilliseconds();
+ while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
capture_time_ms, 250, false);
+ clock_.AdvanceTimeMilliseconds(kTimeStep);
TickTime::AdvanceFakeClock(kTimeStep);
EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(1).
WillOnce(Return(250));
@@ -274,16 +312,17 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
const int64_t kBitrateWindow = 10000;
PacedSenderPadding callback;
send_bucket_.reset(
- new PacedSender(&callback, kPaceMultiplier * kTargetBitrate, 0));
+ new PacedSender(&clock_, &callback, kPaceMultiplier * kTargetBitrate, 0));
send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
- int64_t start_time = TickTime::MillisecondTimestamp();
+ int64_t start_time = clock_.TimeInMilliseconds();
int media_bytes = 0;
- while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
+ while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
int media_payload = rand() % 100 + 200; // [200, 300] bytes.
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, capture_time_ms,
media_payload, false));
media_bytes += media_payload;
+ clock_.AdvanceTimeMilliseconds(kTimeStep);
TickTime::AdvanceFakeClock(kTimeStep);
send_bucket_->Process();
}
@@ -325,6 +364,7 @@ TEST_F(PacedSenderTest, Priority) {
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -335,6 +375,7 @@ TEST_F(PacedSenderTest, Priority) {
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -344,7 +385,7 @@ TEST_F(PacedSenderTest, Pause) {
uint32_t ssrc_low_priority = 12345;
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
- int64_t capture_time_ms = TickTime::MillisecondTimestamp();
+ int64_t capture_time_ms = clock_.TimeInMilliseconds();
EXPECT_EQ(0, send_bucket_->QueueInMs());
@@ -366,15 +407,16 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kHighPriority,
ssrc, sequence_number++, capture_time_ms, 250, false));
+ clock_.AdvanceTimeMilliseconds(10000);
TickTime::AdvanceFakeClock(10000);
- int64_t second_capture_time_ms = TickTime::MillisecondTimestamp();
+ int64_t second_capture_time_ms = clock_.TimeInMilliseconds();
// Expect everything to be queued.
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kLowPriority,
ssrc_low_priority, sequence_number++, second_capture_time_ms, 250,
false));
- EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms,
+ EXPECT_EQ(clock_.TimeInMilliseconds() - capture_time_ms,
send_bucket_->QueueInMs());
// Expect no packet to come out while paused.
@@ -382,6 +424,7 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _)).Times(0);
for (int i = 0; i < 10; ++i) {
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -394,6 +437,7 @@ TEST_F(PacedSenderTest, Pause) {
send_bucket_->Resume();
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -403,6 +447,7 @@ TEST_F(PacedSenderTest, Pause) {
.Times(1)
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
@@ -412,7 +457,7 @@ TEST_F(PacedSenderTest, Pause) {
TEST_F(PacedSenderTest, ResendPacket) {
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
- int64_t capture_time_ms = TickTime::MillisecondTimestamp();
+ int64_t capture_time_ms = clock_.TimeInMilliseconds();
EXPECT_EQ(0, send_bucket_->QueueInMs());
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority,
@@ -421,6 +466,7 @@ TEST_F(PacedSenderTest, ResendPacket) {
capture_time_ms,
250,
false));
+ clock_.AdvanceTimeMilliseconds(1);
TickTime::AdvanceFakeClock(1);
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kNormalPriority,
ssrc,
@@ -428,19 +474,21 @@ TEST_F(PacedSenderTest, ResendPacket) {
capture_time_ms + 1,
250,
false));
+ clock_.AdvanceTimeMilliseconds(9999);
TickTime::AdvanceFakeClock(9999);
- EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms,
+ EXPECT_EQ(clock_.TimeInMilliseconds() - capture_time_ms,
send_bucket_->QueueInMs());
// Fails to send first packet so only one call.
EXPECT_CALL(callback_, TimeToSendPacket(
ssrc, sequence_number, capture_time_ms, false))
.Times(1)
.WillOnce(Return(false));
+ clock_.AdvanceTimeMilliseconds(10000);
TickTime::AdvanceFakeClock(10000);
send_bucket_->Process();
// Queue remains unchanged.
- EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms,
+ EXPECT_EQ(clock_.TimeInMilliseconds() - capture_time_ms,
send_bucket_->QueueInMs());
// Fails to send second packet.
@@ -452,11 +500,12 @@ TEST_F(PacedSenderTest, ResendPacket) {
ssrc, sequence_number + 1, capture_time_ms + 1, false))
.Times(1)
.WillOnce(Return(false));
+ clock_.AdvanceTimeMilliseconds(10000);
TickTime::AdvanceFakeClock(10000);
send_bucket_->Process();
// Queue is reduced by 1 packet.
- EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms - 1,
+ EXPECT_EQ(clock_.TimeInMilliseconds() - capture_time_ms - 1,
send_bucket_->QueueInMs());
// Send second packet and queue becomes empty.
@@ -464,6 +513,7 @@ TEST_F(PacedSenderTest, ResendPacket) {
ssrc, sequence_number + 1, capture_time_ms + 1, false))
.Times(1)
.WillOnce(Return(true));
+ clock_.AdvanceTimeMilliseconds(10000);
TickTime::AdvanceFakeClock(10000);
send_bucket_->Process();
EXPECT_EQ(0, send_bucket_->QueueInMs());
@@ -479,21 +529,23 @@ TEST_F(PacedSenderTest, MaxQueueLength) {
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number++,
- TickTime::MillisecondTimestamp(),
+ clock_.TimeInMilliseconds(),
1200,
false);
}
+ clock_.AdvanceTimeMilliseconds(2001);
TickTime::AdvanceFakeClock(2001);
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number++,
- TickTime::MillisecondTimestamp(),
+ clock_.TimeInMilliseconds(),
1200,
false);
EXPECT_EQ(2001, send_bucket_->QueueInMs());
send_bucket_->Process();
EXPECT_EQ(0, send_bucket_->QueueInMs());
+ clock_.AdvanceTimeMilliseconds(31);
TickTime::AdvanceFakeClock(31);
send_bucket_->Process();
}
@@ -507,10 +559,11 @@ TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number,
- TickTime::MillisecondTimestamp(),
+ clock_.TimeInMilliseconds(),
1200,
false);
+ clock_.AdvanceTimeMilliseconds(500);
TickTime::AdvanceFakeClock(500);
EXPECT_EQ(500, send_bucket_->QueueInMs());
send_bucket_->Process();
diff --git a/modules/remote_bitrate_estimator/bwe_simulations.cc b/modules/remote_bitrate_estimator/bwe_simulations.cc
index 6b208e49..47390f78 100644
--- a/modules/remote_bitrate_estimator/bwe_simulations.cc
+++ b/modules/remote_bitrate_estimator/bwe_simulations.cc
@@ -96,6 +96,36 @@ TEST_P(BweSimulation, Choke1000kbps500kbps1000kbps) {
RunFor(60 * 1000);
}
+TEST_P(BweSimulation, PacerChoke1000kbps500kbps1000kbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender source(0, NULL, 30, 300, 0, 0);
+ PacedVideoSender sender(this, 300, &source);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ filter.SetCapacity(1000);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(1000);
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, PacerChoke200kbps30kbps200kbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender source(0, NULL, 30, 300, 0, 0);
+ PacedVideoSender sender(this, 300, &source);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ filter.SetCapacity(200);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(30);
+ RunFor(60 * 1000);
+ filter.SetCapacity(200);
+ RunFor(60 * 1000);
+}
+
TEST_P(BweSimulation, Choke200kbps30kbps200kbps) {
VerboseLogging(true);
AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
diff --git a/modules/remote_bitrate_estimator/test/bwe_test.cc b/modules/remote_bitrate_estimator/test/bwe_test.cc
index 90ce6a3e..d4919e73 100644
--- a/modules/remote_bitrate_estimator/test/bwe_test.cc
+++ b/modules/remote_bitrate_estimator/test/bwe_test.cc
@@ -76,11 +76,11 @@ class TestedEstimator : public RemoteBitrateObserver {
}
}
- int64_t step_ms = estimator_->TimeUntilNextProcess();
+ int64_t step_ms = std::max(estimator_->TimeUntilNextProcess(), 0);
while ((clock_.TimeInMilliseconds() + step_ms) < packet_time_ms) {
clock_.AdvanceTimeMilliseconds(step_ms);
estimator_->Process();
- step_ms = estimator_->TimeUntilNextProcess();
+ step_ms = std::max(estimator_->TimeUntilNextProcess(), 0);
}
estimator_->IncomingPacket(packet_time_ms, packet.payload_size(),
packet.header());
@@ -195,13 +195,13 @@ class PacketProcessorRunner {
if (queue_.empty()) {
return;
}
- Packets to_transfer;
Packets::iterator it = queue_.begin();
for (; it != queue_.end(); ++it) {
if (it->send_time_us() > end_of_batch_time_us) {
break;
}
}
+ Packets to_transfer;
to_transfer.splice(to_transfer.begin(), queue_, queue_.begin(), it);
batch->merge(to_transfer);
}
diff --git a/modules/remote_bitrate_estimator/test/bwe_test_framework.cc b/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
index b3cd7db9..194db4d3 100644
--- a/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
+++ b/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
@@ -154,6 +154,11 @@ void Packet::set_send_time_us(int64_t send_time_us) {
send_time_us_ = send_time_us;
}
+void Packet::SetAbsSendTimeMs(int64_t abs_send_time_ms) {
+ header_.extension.absoluteSendTime = ((static_cast<int64_t>(abs_send_time_ms *
+ (1 << 18)) + 500) / 1000) & 0x00fffffful;
+}
+
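SetAbsSendTimeMs() fills the RTP absolute-send-time extension, a 24-bit 6.18 fixed-point value counting 1/2^18-second units, so the conversion scales milliseconds by 2^18/1000 with rounding and wraps every 64 seconds. A standalone illustration of that arithmetic (not part of the patch):

  #include <cassert>
  #include <cstdint>

  // Same conversion as SetAbsSendTimeMs(): ms * 2^18 / 1000, rounded,
  // truncated to 24 bits.
  uint32_t AbsSendTimeFromMs(int64_t ms) {
    return static_cast<uint32_t>(((ms * (1 << 18)) + 500) / 1000) & 0x00ffffffu;
  }

  int main() {
    assert(AbsSendTimeFromMs(1000) == (1u << 18));  // 1 s == 2^18 units.
    assert(AbsSendTimeFromMs(64000) == 0);          // Wraps after 64 s.
    return 0;
  }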
bool IsTimeSorted(const Packets& packets) {
PacketsConstIt last_it = packets.begin();
for (PacketsConstIt it = last_it; it != packets.end(); ++it) {
@@ -566,12 +571,8 @@ void VideoSender::RunFor(int64_t time_ms, Packets* in_out) {
now_ms_ += time_ms;
Packets new_packets;
while (now_ms_ >= next_frame_ms_) {
- prototype_header_.sequenceNumber++;
prototype_header_.timestamp = kTimestampBase +
static_cast<uint32_t>(next_frame_ms_ * 90.0);
- prototype_header_.extension.absoluteSendTime = (kTimestampBase +
- ((static_cast<int64_t>(next_frame_ms_ * (1 << 18)) + 500) / 1000)) &
- 0x00fffffful;
prototype_header_.extension.transmissionTimeOffset = 0;
// Generate new packets for this frame, all with the same timestamp,
@@ -581,9 +582,11 @@ void VideoSender::RunFor(int64_t time_ms, Packets* in_out) {
int64_t send_time_us = next_frame_ms_ * 1000.0;
uint32_t payload_size = frame_size_bytes_;
while (payload_size > 0) {
+ ++prototype_header_.sequenceNumber;
uint32_t size = std::min(kMaxPayloadSizeBytes, payload_size);
new_packets.push_back(Packet(flow_ids()[0], send_time_us, size,
prototype_header_));
+ new_packets.back().SetAbsSendTimeMs(next_frame_ms_);
payload_size -= size;
}
@@ -604,6 +607,98 @@ void AdaptiveVideoSender::GiveFeedback(const PacketSender::Feedback& feedback) {
bytes_per_second_ = feedback.estimated_bps / 8;
frame_size_bytes_ = (bytes_per_second_ * frame_period_ms_ + 500) / 1000;
}
+
+PacedVideoSender::PacedVideoSender(PacketProcessorListener* listener,
+ uint32_t kbps,
+ AdaptiveVideoSender* source)
+ // It is important that the first_frame_offset and the initial time of
+ // clock_ are both zero, otherwise we can't have absolute time in this
+ // class.
+ : PacketSender(listener, source->flow_ids()),
+ clock_(0),
+ start_of_run_ms_(0),
+ pacer_(&clock_, this, PacedSender::kDefaultPaceMultiplier * kbps, 0),
+ source_(source) {}
+
+void PacedVideoSender::RunFor(int64_t time_ms, Packets* in_out) {
+ start_of_run_ms_ = clock_.TimeInMilliseconds();
+ Packets generated_packets;
+ source_->RunFor(time_ms, &generated_packets);
+ Packets::iterator it = generated_packets.begin();
+ // Run process periodically to allow the packets to be paced out.
+ const int kProcessIntervalMs = 10;
+ for (int64_t current_time = 0; current_time < time_ms;
+ current_time += kProcessIntervalMs) {
+ int64_t end_of_interval_us =
+ 1000 * (clock_.TimeInMilliseconds() + kProcessIntervalMs);
+ while (it != generated_packets.end() &&
+ end_of_interval_us >= it->send_time_us()) {
+ // Time to send next packet to pacer.
+ pacer_.SendPacket(PacedSender::kNormalPriority,
+ it->header().ssrc,
+ it->header().sequenceNumber,
+ (it->send_time_us() + 500) / 1000,
+ it->payload_size(),
+ false);
+ pacer_queue_.push_back(*it);
+ const size_t kMaxPacerQueueSize = 1000;
+ if (pacer_queue_.size() > kMaxPacerQueueSize) {
+ pacer_queue_.pop_front();
+ }
+ ++it;
+ }
+ clock_.AdvanceTimeMilliseconds(kProcessIntervalMs);
+ pacer_.Process();
+ }
+ QueuePackets(in_out, (start_of_run_ms_ + time_ms) * 1000);
+}
+
+void PacedVideoSender::QueuePackets(Packets* batch,
+ int64_t end_of_batch_time_us) {
+ queue_.merge(*batch);
+ if (queue_.empty()) {
+ return;
+ }
+ Packets::iterator it = queue_.begin();
+ for (; it != queue_.end(); ++it) {
+ if (it->send_time_us() > end_of_batch_time_us) {
+ break;
+ }
+ }
+ Packets to_transfer;
+ to_transfer.splice(to_transfer.begin(), queue_, queue_.begin(), it);
+ batch->merge(to_transfer);
+}
+
+void PacedVideoSender::GiveFeedback(const PacketSender::Feedback& feedback) {
+ source_->GiveFeedback(feedback);
+ pacer_.UpdateBitrate(
+ PacedSender::kDefaultPaceMultiplier * feedback.estimated_bps / 1000, 0);
+}
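UpdateBitrate() expects kbps, so the estimate in bits per second is divided by 1000 and scaled by the new kDefaultPaceMultiplier (2.5); the padding bitrate is left at 0. Worked numbers, for illustration only:

  #include <cstdint>

  // Mirrors the UpdateBitrate() argument above; the 2.5f constant matches
  // PacedSender::kDefaultPaceMultiplier introduced in this patch.
  int PaceBudgetKbps(uint32_t estimated_bps) {
    const float kDefaultPaceMultiplier = 2.5f;
    return static_cast<int>(kDefaultPaceMultiplier * estimated_bps / 1000);
  }
  // PaceBudgetKbps(1000000) == 2500, i.e. a 1 Mbps estimate allows the pacer
  // to drain media at 2500 kbps.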
+
+bool PacedVideoSender::TimeToSendPacket(uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ bool retransmission) {
+ for (Packets::iterator it = pacer_queue_.begin(); it != pacer_queue_.end();
+ ++it) {
+ if (it->header().sequenceNumber == sequence_number) {
+ int64_t pace_out_time_ms = clock_.TimeInMilliseconds();
+ // Make sure a packet is never paced out earlier than when it was put into
+ // the pacer.
+ assert(1000 * pace_out_time_ms >= it->send_time_us());
+ it->SetAbsSendTimeMs(pace_out_time_ms);
+ it->set_send_time_us(1000 * pace_out_time_ms);
+ queue_.push_back(*it);
+ return true;
+ }
+ }
+ return false;
+}
+
+int PacedVideoSender::TimeToSendPadding(int bytes) {
+ return 0;
+}
} // namespace bwe
} // namespace testing
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/test/bwe_test_framework.h b/modules/remote_bitrate_estimator/test/bwe_test_framework.h
index 8af07f91..0ab3b5f9 100644
--- a/modules/remote_bitrate_estimator/test/bwe_test_framework.h
+++ b/modules/remote_bitrate_estimator/test/bwe_test_framework.h
@@ -22,7 +22,9 @@
#include <vector>
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -161,6 +163,7 @@ class Packet {
int64_t creation_time_us() const { return creation_time_us_; }
void set_send_time_us(int64_t send_time_us);
int64_t send_time_us() const { return send_time_us_; }
+ void SetAbsSendTimeMs(int64_t abs_send_time_ms);
uint32_t payload_size() const { return payload_size_; }
const RTPHeader& header() const { return header_; }
@@ -391,9 +394,9 @@ class VideoSender : public PacketSender {
uint32_t max_payload_size_bytes() const { return kMaxPayloadSizeBytes; }
uint32_t bytes_per_second() const { return bytes_per_second_; }
- virtual uint32_t GetCapacityKbps() const;
+ virtual uint32_t GetCapacityKbps() const OVERRIDE;
- virtual void RunFor(int64_t time_ms, Packets* in_out);
+ virtual void RunFor(int64_t time_ms, Packets* in_out) OVERRIDE;
protected:
const uint32_t kMaxPayloadSizeBytes;
@@ -417,12 +420,43 @@ class AdaptiveVideoSender : public VideoSender {
float first_frame_offset);
virtual ~AdaptiveVideoSender() {}
- virtual int GetFeedbackIntervalMs() const { return 100; }
- virtual void GiveFeedback(const Feedback& feedback);
+ virtual int GetFeedbackIntervalMs() const OVERRIDE { return 100; }
+ virtual void GiveFeedback(const Feedback& feedback) OVERRIDE;
-private:
+ private:
DISALLOW_IMPLICIT_CONSTRUCTORS(AdaptiveVideoSender);
};
+
+class PacedVideoSender : public PacketSender, public PacedSender::Callback {
+ public:
+ PacedVideoSender(PacketProcessorListener* listener,
+ uint32_t kbps, AdaptiveVideoSender* source);
+ virtual ~PacedVideoSender() {}
+
+ virtual int GetFeedbackIntervalMs() const OVERRIDE { return 100; }
+ virtual void GiveFeedback(const Feedback& feedback) OVERRIDE;
+ virtual void RunFor(int64_t time_ms, Packets* in_out) OVERRIDE;
+
+ // Implements PacedSender::Callback.
+ virtual bool TimeToSendPacket(uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ bool retransmission) OVERRIDE;
+ virtual int TimeToSendPadding(int bytes) OVERRIDE;
+
+ private:
+ void QueuePackets(Packets* batch, int64_t end_of_batch_time_us);
+
+ static const int64_t kInitialTimeMs = 0;
+ SimulatedClock clock_;
+ int64_t start_of_run_ms_;
+ PacedSender pacer_;
+ Packets pacer_queue_;
+ Packets queue_;
+ AdaptiveVideoSender* source_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(PacedVideoSender);
+};
} // namespace bwe
} // namespace testing
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc b/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc
index ec329268..6e735785 100644
--- a/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc
+++ b/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc
@@ -500,7 +500,7 @@ TEST(BweTestFramework_JitterFilterTest, Jitter1031) {
TestJitterFilter(1031);
}
-static void TestReorderFilter(uint32_t reorder_percent, uint32_t near) {
+static void TestReorderFilter(uint32_t reorder_percent, uint32_t near_value) {
const uint32_t kPacketCount = 10000;
// Generate packets with 10 ms interval.
@@ -533,7 +533,8 @@ static void TestReorderFilter(uint32_t reorder_percent, uint32_t near) {
// Because reordering is random, we allow a threshold when comparing. The
// maximum distance a packet can be moved is PacketCount - 1.
- EXPECT_NEAR(((kPacketCount - 1) * reorder_percent) / 100, distance, near);
+ EXPECT_NEAR(
+ ((kPacketCount - 1) * reorder_percent) / 100, distance, near_value);
}
TEST(BweTestFramework_ReorderFilterTest, Reorder0) {
diff --git a/modules/rtp_rtcp/interface/rtp_header_parser.h b/modules/rtp_rtcp/interface/rtp_header_parser.h
index a13f5b80..2809996b 100644
--- a/modules/rtp_rtcp/interface/rtp_header_parser.h
+++ b/modules/rtp_rtcp/interface/rtp_header_parser.h
@@ -23,13 +23,14 @@ class RtpHeaderParser {
virtual ~RtpHeaderParser() {}
// Returns true if the packet is an RTCP packet, false otherwise.
- static bool IsRtcp(const uint8_t* packet, int length);
+ static bool IsRtcp(const uint8_t* packet, size_t length);
// Parses the packet and stores the parsed packet in |header|. Returns true on
// success, false otherwise.
// This method is thread-safe in the sense that it can parse multiple packets
// at once.
- virtual bool Parse(const uint8_t* packet, int length,
+ virtual bool Parse(const uint8_t* packet,
+ size_t length,
RTPHeader* header) const = 0;
// Registers an RTP header extension and binds it to |id|.
diff --git a/modules/rtp_rtcp/interface/rtp_payload_registry.h b/modules/rtp_rtcp/interface/rtp_payload_registry.h
index 965f4b02..327ea165 100644
--- a/modules/rtp_rtcp/interface/rtp_payload_registry.h
+++ b/modules/rtp_rtcp/interface/rtp_payload_registry.h
@@ -25,17 +25,15 @@ class RTPPayloadStrategy {
virtual bool CodecsMustBeUnique() const = 0;
- virtual bool PayloadIsCompatible(
- const ModuleRTPUtility::Payload& payload,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) const = 0;
+ virtual bool PayloadIsCompatible(const RtpUtility::Payload& payload,
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate) const = 0;
- virtual void UpdatePayloadRate(
- ModuleRTPUtility::Payload* payload,
- const uint32_t rate) const = 0;
+ virtual void UpdatePayloadRate(RtpUtility::Payload* payload,
+ const uint32_t rate) const = 0;
- virtual ModuleRTPUtility::Payload* CreatePayloadType(
+ virtual RtpUtility::Payload* CreatePayloadType(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
@@ -43,7 +41,7 @@ class RTPPayloadStrategy {
const uint32_t rate) const = 0;
virtual int GetPayloadTypeFrequency(
- const ModuleRTPUtility::Payload& payload) const = 0;
+ const RtpUtility::Payload& payload) const = 0;
static RTPPayloadStrategy* CreateStrategy(const bool handling_audio);
@@ -99,9 +97,8 @@ class RTPPayloadRegistry {
int GetPayloadTypeFrequency(uint8_t payload_type) const;
- bool PayloadTypeToPayload(
- const uint8_t payload_type,
- ModuleRTPUtility::Payload*& payload) const;
+ bool PayloadTypeToPayload(const uint8_t payload_type,
+ RtpUtility::Payload*& payload) const;
void ResetLastReceivedPayloadTypes() {
CriticalSectionScoped cs(crit_sect_.get());
@@ -151,7 +148,7 @@ class RTPPayloadRegistry {
bool IsRtxInternal(const RTPHeader& header) const;
scoped_ptr<CriticalSectionWrapper> crit_sect_;
- ModuleRTPUtility::PayloadTypeMap payload_type_map_;
+ RtpUtility::PayloadTypeMap payload_type_map_;
scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
int8_t red_payload_type_;
int8_t ulpfec_payload_type_;
diff --git a/modules/rtp_rtcp/interface/rtp_rtcp.h b/modules/rtp_rtcp/interface/rtp_rtcp.h
index 95c565f0..235ca849 100644
--- a/modules/rtp_rtcp/interface/rtp_rtcp.h
+++ b/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -67,6 +67,7 @@ class RtpRtcp : public Module {
RtpAudioFeedback* audio_messages;
RemoteBitrateEstimator* remote_bitrate_estimator;
PacedSender* paced_sender;
+ BitrateStatisticsObserver* send_bitrate_observer;
};
/*
@@ -203,6 +204,10 @@ class RtpRtcp : public Module {
*/
virtual int32_t SetSequenceNumber(const uint16_t seq) = 0;
+ virtual void SetRtpStateForSsrc(uint32_t ssrc,
+ const RtpState& rtp_state) = 0;
+ virtual bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) = 0;
+
/*
* Get SSRC
*/
@@ -305,13 +310,6 @@ class RtpRtcp : public Module {
uint32_t* nackRate) const = 0;
/*
- * Called on any new send bitrate estimate.
- */
- virtual void RegisterVideoBitrateObserver(
- BitrateStatisticsObserver* observer) = 0;
- virtual BitrateStatisticsObserver* GetVideoBitrateObserver() const = 0;
-
- /*
* Used by the codec module to deliver a video or audio frame for
* packetization.
*
diff --git a/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
index 6f99f938..e1bec5fc 100644
--- a/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
+++ b/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
@@ -192,6 +192,20 @@ struct RtcpReceiveTimeInfo {
typedef std::list<RTCPReportBlock> ReportBlockList;
+struct RtpState {
+ RtpState()
+ : sequence_number(0),
+ start_timestamp(0),
+ timestamp(0),
+ capture_time_ms(-1),
+ last_timestamp_time_ms(-1) {}
+ uint16_t sequence_number;
+ uint32_t start_timestamp;
+ uint32_t timestamp;
+ int64_t capture_time_ms;
+ int64_t last_timestamp_time_ms;
+};
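RtpState bundles the per-SSRC send state (sequence number, timestamp bases and capture times) so it can be read from one RtpRtcp module and restored on another via the SetRtpStateForSsrc()/GetRtpStateForSsrc() pair added to the RtpRtcp interface above. A hypothetical usage sketch; old_module, new_module and the include path are assumptions, not from the patch:

  #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"

  // Carry the RTP send state for one SSRC from an old module instance to a
  // new one, so sequence numbers and timestamps continue where they left off.
  void MoveRtpState(webrtc::RtpRtcp* old_module,
                    webrtc::RtpRtcp* new_module,
                    uint32_t ssrc) {
    webrtc::RtpState state;
    if (old_module->GetRtpStateForSsrc(ssrc, &state))
      new_module->SetRtpStateForSsrc(ssrc, state);
  }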
+
class RtpData
{
public:
diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 03156c79..fe20c6a3 100644
--- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -74,6 +74,9 @@ class MockRtpRtcp : public RtpRtcp {
uint16_t());
MOCK_METHOD1(SetSequenceNumber,
int32_t(const uint16_t seq));
+ MOCK_METHOD2(SetRtpStateForSsrc,
+ void(uint32_t ssrc, const RtpState& rtp_state));
+ MOCK_METHOD2(GetRtpStateForSsrc, bool(uint32_t ssrc, RtpState* rtp_state));
MOCK_CONST_METHOD0(SSRC,
uint32_t());
MOCK_METHOD1(SetSSRC,
diff --git a/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc b/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
index d62f50b4..6560209c 100644
--- a/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
+++ b/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
@@ -238,9 +238,14 @@ RTPSenderH264::SendH264FillerData(const WebRtcRTPHeader* rtpHeader,
dataBuffer[0] = static_cast<uint8_t>(0x80); // version 2
dataBuffer[1] = rtpHeader->header.payloadType;
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // get the current SequenceNumber and add by 1 after returning
- ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, rtpHeader->header.timestamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, rtpHeader->header.ssrc);
+ RtpUtility::AssignUWord16ToBuffer(
+ dataBuffer + 2,
+ _rtpSender.IncrementSequenceNumber()); // returns the current sequence
+ // number and increments it by one
+ // after returning.
+ RtpUtility::AssignUWord32ToBuffer(dataBuffer + 4,
+ rtpHeader->header.timestamp);
+ RtpUtility::AssignUWord32ToBuffer(dataBuffer + 8, rtpHeader->header.ssrc);
// set filler NALU type
dataBuffer[12] = 12; // NRI field = 0, type 12
@@ -361,8 +366,12 @@ RTPSenderH264::SendH264SVCRelayPacket(const WebRtcRTPHeader* rtpHeader,
// _sequenceNumber will not work for re-ordering by NACK from the original
// sender; the engine is responsible for this.
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // get the current SequenceNumber and add by 1 after returning
- //ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, ssrc);
+ RtpUtility::AssignUWord16ToBuffer(
+ dataBuffer + 2,
+ _rtpSender.IncrementSequenceNumber()); // returns the current sequence
+ // number and increments it by one
+ // after returning.
+ // RtpUtility::AssignUWord32ToBuffer(dataBuffer+8, ssrc);
// how do we know it's the last relayed packet in a frame?
// 1) packets arrive in order, the engine manages that
diff --git a/modules/rtp_rtcp/source/fec_receiver_impl.cc b/modules/rtp_rtcp/source/fec_receiver_impl.cc
index 0d6c174a..e795841f 100644
--- a/modules/rtp_rtcp/source/fec_receiver_impl.cc
+++ b/modules/rtp_rtcp/source/fec_receiver_impl.cc
@@ -171,7 +171,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
payload_data_length - REDHeaderLength);
received_packet->pkt->length = payload_data_length - REDHeaderLength;
received_packet->ssrc =
- ModuleRTPUtility::BufferToUWord32(&incoming_rtp_packet[8]);
+ RtpUtility::BufferToUWord32(&incoming_rtp_packet[8]);
} else {
// copy the RTP header
diff --git a/modules/rtp_rtcp/source/fec_test_helper.cc b/modules/rtp_rtcp/source/fec_test_helper.cc
index 176954f9..0ffd5bf4 100644
--- a/modules/rtp_rtcp/source/fec_test_helper.cc
+++ b/modules/rtp_rtcp/source/fec_test_helper.cc
@@ -86,9 +86,9 @@ void FrameGenerator::BuildRtpHeader(uint8_t* data, const RTPHeader* header) {
data[0] = 0x80; // Version 2.
data[1] = header->payloadType;
data[1] |= (header->markerBit ? kRtpMarkerBitMask : 0);
- ModuleRTPUtility::AssignUWord16ToBuffer(data + 2, header->sequenceNumber);
- ModuleRTPUtility::AssignUWord32ToBuffer(data + 4, header->timestamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(data + 8, header->ssrc);
+ RtpUtility::AssignUWord16ToBuffer(data + 2, header->sequenceNumber);
+ RtpUtility::AssignUWord32ToBuffer(data + 4, header->timestamp);
+ RtpUtility::AssignUWord32ToBuffer(data + 8, header->ssrc);
}
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/forward_error_correction.cc b/modules/rtp_rtcp/source/forward_error_correction.cc
index 31303c8a..b02ea086 100644
--- a/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -230,7 +230,7 @@ void ForwardErrorCorrection::GenerateFecBitStrings(
Packet* media_packet = *media_list_it;
// Assign network-ordered media payload length.
- ModuleRTPUtility::AssignUWord16ToBuffer(
+ RtpUtility::AssignUWord16ToBuffer(
media_payload_length, media_packet->length - kRtpHeaderSize);
fec_packet_length = media_packet->length + fec_rtp_offset;
@@ -432,7 +432,7 @@ void ForwardErrorCorrection::GenerateFecUlpHeaders(
// -- ULP header --
// Copy the payload size to the protection length field.
// (We protect the entire packet.)
- ModuleRTPUtility::AssignUWord16ToBuffer(
+ RtpUtility::AssignUWord16ToBuffer(
&generated_fec_packets_[i].data[10],
generated_fec_packets_[i].length - kFecHeaderSize - ulp_header_size);
@@ -537,7 +537,7 @@ void ForwardErrorCorrection::InsertFECPacket(
fec_packet->ssrc = rx_packet->ssrc;
const uint16_t seq_num_base =
- ModuleRTPUtility::BufferToUWord16(&fec_packet->pkt->data[2]);
+ RtpUtility::BufferToUWord16(&fec_packet->pkt->data[2]);
const uint16_t maskSizeBytes =
(fec_packet->pkt->data[0] & 0x40) ? kMaskSizeLBitSet
: kMaskSizeLBitClear; // L bit set?
@@ -603,6 +603,23 @@ void ForwardErrorCorrection::InsertPackets(
while (!received_packet_list->empty()) {
ReceivedPacket* rx_packet = received_packet_list->front();
+ // Check whether to discard the oldest FEC packet, to avoid incorrect FEC
+ // decoding caused by sequence number wrap-around. An old FEC packet is
+ // detected from the sequence number difference between the received packet
+ // and the oldest packet in the FEC packet list.
+ // TODO(marpan/holmer): We should be able to improve detection/discarding of
+ // old FEC packets based on timestamp information or better sequence number
+ // thresholding (e.g., to distinguish between wrap-around and reordering).
+ if (!fec_packet_list_.empty()) {
+ uint16_t seq_num_diff = abs(
+ static_cast<int>(rx_packet->seq_num) -
+ static_cast<int>(fec_packet_list_.front()->seq_num));
+ if (seq_num_diff > 0x3fff) {
+ DiscardFECPacket(fec_packet_list_.front());
+ fec_packet_list_.pop_front();
+ }
+ }
+
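The threshold 0x3fff is a quarter of the 16-bit sequence number space: if the plain (non-modular) distance between the incoming packet and the oldest stored FEC packet exceeds it, that FEC packet is assumed stale and is dropped before decoding. A self-contained illustration of the test (not the library code):

  #include <cassert>
  #include <cstdint>
  #include <cstdlib>

  // Same staleness check as above: absolute difference of the 16-bit
  // sequence numbers compared against 0x3fff (16383).
  bool IsOldFecPacket(uint16_t rx_seq, uint16_t oldest_fec_seq) {
    uint16_t diff = std::abs(static_cast<int>(rx_seq) -
                             static_cast<int>(oldest_fec_seq));
    return diff > 0x3fff;
  }

  int main() {
    assert(!IsOldFecPacket(100, 90));    // Nearby: keep the FEC packet.
    assert(IsOldFecPacket(50000, 10));   // Far apart: discard it.
    return 0;
  }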
if (rx_packet->is_fec) {
InsertFECPacket(rx_packet, recovered_packet_list);
} else {
@@ -633,7 +650,7 @@ void ForwardErrorCorrection::InitRecovery(const FecPacket* fec_packet,
// Copy FEC payload, skipping the ULP header.
memcpy(&recovered->pkt->data[kRtpHeaderSize],
&fec_packet->pkt->data[kFecHeaderSize + ulp_header_size],
- ModuleRTPUtility::BufferToUWord16(protection_length));
+ RtpUtility::BufferToUWord16(protection_length));
// Copy the length recovery field.
memcpy(recovered->length_recovery, &fec_packet->pkt->data[8], 2);
// Copy the first 2 bytes of the FEC header.
@@ -641,8 +658,7 @@ void ForwardErrorCorrection::InitRecovery(const FecPacket* fec_packet,
// Copy the 5th to 8th bytes of the FEC header.
memcpy(&recovered->pkt->data[4], &fec_packet->pkt->data[4], 4);
// Set the SSRC field.
- ModuleRTPUtility::AssignUWord32ToBuffer(&recovered->pkt->data[8],
- fec_packet->ssrc);
+ RtpUtility::AssignUWord32ToBuffer(&recovered->pkt->data[8], fec_packet->ssrc);
}
void ForwardErrorCorrection::FinishRecovery(RecoveredPacket* recovered) {
@@ -651,12 +667,11 @@ void ForwardErrorCorrection::FinishRecovery(RecoveredPacket* recovered) {
recovered->pkt->data[0] &= 0xbf; // Clear the 2nd bit.
// Set the SN field.
- ModuleRTPUtility::AssignUWord16ToBuffer(&recovered->pkt->data[2],
- recovered->seq_num);
+ RtpUtility::AssignUWord16ToBuffer(&recovered->pkt->data[2],
+ recovered->seq_num);
// Recover the packet length.
recovered->pkt->length =
- ModuleRTPUtility::BufferToUWord16(recovered->length_recovery) +
- kRtpHeaderSize;
+ RtpUtility::BufferToUWord16(recovered->length_recovery) + kRtpHeaderSize;
}
void ForwardErrorCorrection::XorPackets(const Packet* src_packet,
@@ -671,8 +686,8 @@ void ForwardErrorCorrection::XorPackets(const Packet* src_packet,
}
// XOR with the network-ordered payload size.
uint8_t media_payload_length[2];
- ModuleRTPUtility::AssignUWord16ToBuffer(media_payload_length,
- src_packet->length - kRtpHeaderSize);
+ RtpUtility::AssignUWord16ToBuffer(media_payload_length,
+ src_packet->length - kRtpHeaderSize);
dst_packet->length_recovery[0] ^= media_payload_length[0];
dst_packet->length_recovery[1] ^= media_payload_length[1];
diff --git a/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h b/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h
index ccf82e5d..f577cbaa 100644
--- a/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h
+++ b/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h
@@ -21,21 +21,21 @@ class MockRTPPayloadStrategy : public RTPPayloadStrategy {
MOCK_CONST_METHOD0(CodecsMustBeUnique,
bool());
MOCK_CONST_METHOD4(PayloadIsCompatible,
- bool(const ModuleRTPUtility::Payload& payload,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate));
+ bool(const RtpUtility::Payload& payload,
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate));
MOCK_CONST_METHOD2(UpdatePayloadRate,
- void(ModuleRTPUtility::Payload* payload, const uint32_t rate));
- MOCK_CONST_METHOD1(GetPayloadTypeFrequency, int(
- const ModuleRTPUtility::Payload& payload));
- MOCK_CONST_METHOD5(CreatePayloadType,
- ModuleRTPUtility::Payload*(
- const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payloadType,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate));
+ void(RtpUtility::Payload* payload, const uint32_t rate));
+ MOCK_CONST_METHOD1(GetPayloadTypeFrequency,
+ int(const RtpUtility::Payload& payload));
+ MOCK_CONST_METHOD5(
+ CreatePayloadType,
+ RtpUtility::Payload*(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const int8_t payloadType,
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate));
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/producer_fec.cc b/modules/rtp_rtcp/source/producer_fec.cc
index 3173d3d5..747cd89e 100644
--- a/modules/rtp_rtcp/source/producer_fec.cc
+++ b/modules/rtp_rtcp/source/producer_fec.cc
@@ -61,7 +61,7 @@ void RedPacket::CreateHeader(const uint8_t* rtp_header, int header_length,
void RedPacket::SetSeqNum(int seq_num) {
assert(seq_num >= 0 && seq_num < (1<<16));
- ModuleRTPUtility::AssignUWord16ToBuffer(&data_[2], seq_num);
+ RtpUtility::AssignUWord16ToBuffer(&data_[2], seq_num);
}
void RedPacket::AssignPayload(const uint8_t* payload, int length) {
diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.cc b/modules/rtp_rtcp/source/receive_statistics_impl.cc
index aa7c9c57..e3bc95f7 100644
--- a/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -133,11 +133,12 @@ void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
uint32_t receive_time_secs,
uint32_t receive_time_frac) {
- uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
- receive_time_secs, receive_time_frac, header.payload_type_frequency);
- uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
- last_receive_time_secs_, last_receive_time_frac_,
- header.payload_type_frequency);
+ uint32_t receive_time_rtp = RtpUtility::ConvertNTPTimeToRTP(
+ receive_time_secs, receive_time_frac, header.payload_type_frequency);
+ uint32_t last_receive_time_rtp =
+ RtpUtility::ConvertNTPTimeToRTP(last_receive_time_secs_,
+ last_receive_time_frac_,
+ header.payload_type_frequency);
int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
(header.timestamp - last_received_timestamp_);
diff --git a/modules/rtp_rtcp/source/rtcp_packet.cc b/modules/rtp_rtcp/source/rtcp_packet.cc
index f6d3bd3d..68da3aeb 100644
--- a/modules/rtp_rtcp/source/rtcp_packet.cc
+++ b/modules/rtp_rtcp/source/rtcp_packet.cc
@@ -61,15 +61,15 @@ void AssignUWord8(uint8_t* buffer, size_t* offset, uint8_t value) {
buffer[(*offset)++] = value;
}
void AssignUWord16(uint8_t* buffer, size_t* offset, uint16_t value) {
- ModuleRTPUtility::AssignUWord16ToBuffer(buffer + *offset, value);
+ RtpUtility::AssignUWord16ToBuffer(buffer + *offset, value);
*offset += 2;
}
void AssignUWord24(uint8_t* buffer, size_t* offset, uint32_t value) {
- ModuleRTPUtility::AssignUWord24ToBuffer(buffer + *offset, value);
+ RtpUtility::AssignUWord24ToBuffer(buffer + *offset, value);
*offset += 3;
}
void AssignUWord32(uint8_t* buffer, size_t* offset, uint32_t value) {
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + *offset, value);
+ RtpUtility::AssignUWord32ToBuffer(buffer + *offset, value);
*offset += 4;
}
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc
index 896bd5f4..b38ae1f0 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -1361,8 +1361,6 @@ int32_t RTCPReceiver::UpdateTMMBR() {
void RTCPReceiver::RegisterRtcpStatisticsCallback(
RtcpStatisticsCallback* callback) {
CriticalSectionScoped cs(_criticalSectionFeedbacks);
- if (callback != NULL)
- assert(stats_callback_ == NULL);
stats_callback_ = callback;
}
diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc
index d73de9c4..b9ab0c1e 100644
--- a/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -708,24 +708,24 @@ int32_t RTCPSender::BuildSR(const FeedbackState& feedback_state,
pos++;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// NTP
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, NTPsec);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, NTPsec);
pos += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, NTPfrac);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, NTPfrac);
pos += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, RTPtime);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, RTPtime);
pos += 4;
//sender's packet count
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
- feedback_state.packet_count_sent);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos,
+ feedback_state.packet_count_sent);
pos += 4;
//sender's octet count
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
- feedback_state.byte_count_sent);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos,
+ feedback_state.byte_count_sent);
pos += 4;
uint8_t numberOfReportBlocks = 0;
@@ -741,7 +741,7 @@ int32_t RTCPSender::BuildSR(const FeedbackState& feedback_state,
rtcpbuffer[posNumberOfReportBlocks] += numberOfReportBlocks;
uint16_t len = uint16_t((pos/4) -1);
- ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+2, len);
+ RtpUtility::AssignUWord16ToBuffer(rtcpbuffer + 2, len);
return 0;
}
@@ -767,7 +767,7 @@ int32_t RTCPSender::BuildSDEC(uint8_t* rtcpbuffer, int& pos) {
pos++;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// CNAME = 1
@@ -802,7 +802,7 @@ int32_t RTCPSender::BuildSDEC(uint8_t* rtcpbuffer, int& pos) {
uint32_t SSRC = it->first;
// Add SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, SSRC);
pos += 4;
// CNAME = 1
@@ -833,8 +833,7 @@ int32_t RTCPSender::BuildSDEC(uint8_t* rtcpbuffer, int& pos) {
}
// in 32-bit words minus one and we don't count the header
uint16_t buffer_length = (SDESLength / 4) - 1;
- ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer + SDESLengthPos,
- buffer_length);
+ RtpUtility::AssignUWord16ToBuffer(rtcpbuffer + SDESLengthPos, buffer_length);
return 0;
}
@@ -859,7 +858,7 @@ RTCPSender::BuildRR(uint8_t* rtcpbuffer,
pos++;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
uint8_t numberOfReportBlocks = 0;
@@ -874,7 +873,7 @@ RTCPSender::BuildRR(uint8_t* rtcpbuffer,
rtcpbuffer[posNumberOfReportBlocks] += numberOfReportBlocks;
uint16_t len = uint16_t((pos)/4 -1);
- ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+2, len);
+ RtpUtility::AssignUWord16ToBuffer(rtcpbuffer + 2, len);
return 0;
}
@@ -925,8 +924,8 @@ RTCPSender::BuildExtendedJitterReport(
rtcpbuffer[pos++]=(uint8_t)(1);
// Add inter-arrival jitter
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos,
- jitterTransmissionTimeOffset);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos,
+ jitterTransmissionTimeOffset);
pos += 4;
return 0;
}
@@ -949,11 +948,11 @@ RTCPSender::BuildPLI(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=(uint8_t)(2);
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Add the remote SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
return 0;
}
@@ -979,7 +978,7 @@ int32_t RTCPSender::BuildFIR(uint8_t* rtcpbuffer,
rtcpbuffer[pos++] = (uint8_t)(4);
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// RFC 5104 4.3.1.2. Semantics
@@ -990,7 +989,7 @@ int32_t RTCPSender::BuildFIR(uint8_t* rtcpbuffer,
rtcpbuffer[pos++] = (uint8_t)0;
// Additional Feedback Control Information (FCI)
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
rtcpbuffer[pos++] = (uint8_t)(_sequenceNumberFIR);
@@ -1025,18 +1024,18 @@ RTCPSender::BuildSLI(uint8_t* rtcpbuffer, int& pos, const uint8_t pictureID)
rtcpbuffer[pos++]=(uint8_t)(3);
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Add the remote SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
// Add first, number & picture ID 6 bits
// first = 0, 13 - bits
// number = 0x1fff, 13 - bits only ones for now
uint32_t sliField = (0x1fff << 6)+ (0x3f & pictureID);
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, sliField);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, sliField);
pos += 4;
return 0;
}
@@ -1090,11 +1089,11 @@ RTCPSender::BuildRPSI(uint8_t* rtcpbuffer,
rtcpbuffer[pos++]=size;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Add the remote SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
// calc padding length
@@ -1147,11 +1146,11 @@ RTCPSender::BuildREMB(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=_lengthRembSSRC + 4;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Remote SSRC must be 0
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, 0);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, 0);
pos += 4;
rtcpbuffer[pos++]='R';
@@ -1177,8 +1176,8 @@ RTCPSender::BuildREMB(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=(uint8_t)(brMantissa);
for (int i = 0; i < _lengthRembSSRC; i++)
- {
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rembSSRC[i]);
+ {
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _rembSSRC[i]);
pos += 4;
}
return 0;
@@ -1264,7 +1263,7 @@ int32_t RTCPSender::BuildTMMBR(ModuleRtpRtcpImpl* rtp_rtcp_module,
rtcpbuffer[pos++]=(uint8_t)(4);
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// RFC 5104 4.2.1.2. Semantics
@@ -1276,7 +1275,7 @@ int32_t RTCPSender::BuildTMMBR(ModuleRtpRtcpImpl* rtp_rtcp_module,
rtcpbuffer[pos++]=(uint8_t)0;
// Additional Feedback Control Information (FCI)
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
uint32_t bitRate = _tmmbr_Send*1000;
@@ -1324,7 +1323,7 @@ RTCPSender::BuildTMMBN(uint8_t* rtcpbuffer, int& pos)
pos++;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// RFC 5104 4.2.2.2. Semantics
@@ -1342,7 +1341,7 @@ RTCPSender::BuildTMMBN(uint8_t* rtcpbuffer, int& pos)
if (boundingSet->Tmmbr(n) > 0)
{
uint32_t tmmbrSSRC = boundingSet->Ssrc(n);
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, tmmbrSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, tmmbrSSRC);
pos += 4;
uint32_t bitRate = boundingSet->Tmmbr(n) * 1000;
@@ -1395,11 +1394,11 @@ RTCPSender::BuildAPP(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=(uint8_t)(length);
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Add our application name
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _appName);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _appName);
pos += 4;
// Add the data
@@ -1433,11 +1432,11 @@ RTCPSender::BuildNACK(uint8_t* rtcpbuffer,
rtcpbuffer[pos++]=(uint8_t)(3); //setting it to one kNACK signal as default
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Add the remote SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
NACKStringBuilder stringBuilder;
@@ -1464,9 +1463,9 @@ RTCPSender::BuildNACK(uint8_t* rtcpbuffer,
}
// Write the sequence number and the bitmask to the packet.
assert(pos + 4 < IP_PACKET_SIZE);
- ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer + pos, nack);
+ RtpUtility::AssignUWord16ToBuffer(rtcpbuffer + pos, nack);
pos += 2;
- ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer + pos, bitmask);
+ RtpUtility::AssignUWord16ToBuffer(rtcpbuffer + pos, bitmask);
pos += 2;
numOfNackFields++;
}
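
Each generic NACK FCI field written here is a 16-bit packet ID plus a 16-bit bitmask of the following sixteen sequence numbers (RFC 4585). A stand-alone sketch of that packing, assuming a sorted list of lost sequence numbers (illustrative helper; BuildNACK additionally enforces the IP_PACKET_SIZE limit):

    #include <cstdint>
    #include <vector>

    struct NackField {
      uint16_t pid;  // First lost packet ID in this field.
      uint16_t blp;  // Bitmask of the next 16 sequence numbers.
    };

    // Packs lost sequence numbers into RFC 4585 generic NACK fields.
    std::vector<NackField> PackNack(const std::vector<uint16_t>& lost_sorted) {
      std::vector<NackField> fields;
      size_t i = 0;
      while (i < lost_sorted.size()) {
        NackField field = {lost_sorted[i], 0};
        ++i;
        // Fold every loss within 16 of the base PID into the bitmask.
        while (i < lost_sorted.size() &&
               static_cast<uint16_t>(lost_sorted[i] - field.pid) <= 16) {
          field.blp |= 1 << (static_cast<uint16_t>(lost_sorted[i] - field.pid) - 1);
          ++i;
        }
        fields.push_back(field);
      }
      return fields;
    }
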
@@ -1497,13 +1496,13 @@ RTCPSender::BuildBYE(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=(uint8_t)(1 + _CSRCs);
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// add CSRCs
for(int i = 0; i < _CSRCs; i++)
{
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _CSRC[i]);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _CSRC[i]);
pos += 4;
}
} else
@@ -1517,7 +1516,7 @@ RTCPSender::BuildBYE(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=(uint8_t)1;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
}
return 0;
@@ -1546,7 +1545,7 @@ int32_t RTCPSender::BuildReceiverReferenceTime(uint8_t* buffer,
buffer[pos++] = 4; // XR packet length.
// Add our own SSRC.
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, _SSRC);
pos += 4;
// 0 1 2 3
@@ -1566,9 +1565,9 @@ int32_t RTCPSender::BuildReceiverReferenceTime(uint8_t* buffer,
buffer[pos++] = 2; // Block length.
// NTP timestamp.
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, ntp_sec);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, ntp_sec);
pos += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, ntp_frac);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, ntp_frac);
pos += 4;
return 0;
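
The RRTR block carries the sender's wall-clock time as a 64-bit NTP timestamp split into ntp_sec and ntp_frac. In WebRTC both values come from the injected Clock; as a rough illustration of the split itself, assuming a time expressed in milliseconds since the NTP epoch:

    #include <cstdint>

    // Converts milliseconds since the NTP epoch (1 Jan 1900) into the 32-bit
    // seconds / 32-bit fraction pair written into the RRTR block.
    void MsToNtp(uint64_t ms_since_ntp_epoch, uint32_t* ntp_sec, uint32_t* ntp_frac) {
      *ntp_sec = static_cast<uint32_t>(ms_since_ntp_epoch / 1000);
      const uint64_t remainder_ms = ms_since_ntp_epoch % 1000;
      // One millisecond equals 2^32 / 1000 units of the fraction field.
      *ntp_frac = static_cast<uint32_t>((remainder_ms << 32) / 1000);
    }
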
@@ -1589,7 +1588,7 @@ int32_t RTCPSender::BuildDlrr(uint8_t* buffer,
buffer[pos++] = 5; // XR packet length.
// Add our own SSRC.
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, _SSRC);
pos += 4;
// 0 1 2 3
@@ -1614,11 +1613,11 @@ int32_t RTCPSender::BuildDlrr(uint8_t* buffer,
buffer[pos++] = 3; // Block length.
// NTP timestamp.
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, info.sourceSSRC);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, info.sourceSSRC);
pos += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, info.lastRR);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, info.lastRR);
pos += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(buffer + pos, info.delaySinceLastRR);
+ RtpUtility::AssignUWord32ToBuffer(buffer + pos, info.delaySinceLastRR);
pos += 4;
return 0;
@@ -1644,7 +1643,7 @@ RTCPSender::BuildVoIPMetric(uint8_t* rtcpbuffer, int& pos)
pos++;
// Add our own SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
pos += 4;
// Add a VoIP metrics block
@@ -1654,7 +1653,7 @@ RTCPSender::BuildVoIPMetric(uint8_t* rtcpbuffer, int& pos)
rtcpbuffer[pos++]=8;
// Add the remote SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
pos += 4;
rtcpbuffer[pos++] = _xrVoIPMetric.lossRate;
@@ -2210,33 +2209,33 @@ int32_t RTCPSender::WriteReportBlocksToBuffer(
RTCPReportBlock* reportBlock = it->second;
if (reportBlock) {
// Remote SSRC
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+position, remoteSSRC);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + position, remoteSSRC);
position += 4;
// fraction lost
rtcpbuffer[position++] = reportBlock->fractionLost;
// cumulative loss
- ModuleRTPUtility::AssignUWord24ToBuffer(rtcpbuffer+position,
- reportBlock->cumulativeLost);
+ RtpUtility::AssignUWord24ToBuffer(rtcpbuffer + position,
+ reportBlock->cumulativeLost);
position += 3;
// extended highest seq_no, contain the highest sequence number received
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+position,
- reportBlock->extendedHighSeqNum);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + position,
+ reportBlock->extendedHighSeqNum);
position += 4;
// Jitter
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+position,
- reportBlock->jitter);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + position,
+ reportBlock->jitter);
position += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+position,
- reportBlock->lastSR);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + position,
+ reportBlock->lastSR);
position += 4;
- ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+position,
- reportBlock->delaySinceLastSR);
+ RtpUtility::AssignUWord32ToBuffer(rtcpbuffer + position,
+ reportBlock->delaySinceLastSR);
position += 4;
}
}
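
Each report block written above is a fixed 24-byte structure (RFC 3550, section 6.4.1). A self-contained sketch of the same field order, with local big-endian writers standing in for RtpUtility::AssignUWordXXToBuffer:

    #include <cstdint>

    static void WriteBe32(uint8_t* buf, uint32_t v) {
      buf[0] = v >> 24; buf[1] = v >> 16; buf[2] = v >> 8; buf[3] = v;
    }
    static void WriteBe24(uint8_t* buf, uint32_t v) {
      buf[0] = v >> 16; buf[1] = v >> 8; buf[2] = v;
    }

    // Serializes one RTCP report block and returns the number of bytes written.
    int WriteReportBlock(uint8_t* buf, uint32_t remote_ssrc, uint8_t fraction_lost,
                         uint32_t cumulative_lost, uint32_t extended_high_seq,
                         uint32_t jitter, uint32_t last_sr,
                         uint32_t delay_since_last_sr) {
      WriteBe32(buf + 0, remote_ssrc);
      buf[4] = fraction_lost;
      WriteBe24(buf + 5, cumulative_lost);     // 24-bit cumulative loss.
      WriteBe32(buf + 8, extended_high_seq);
      WriteBe32(buf + 12, jitter);
      WriteBe32(buf + 16, last_sr);
      WriteBe32(buf + 20, delay_since_last_sr);
      return 24;
    }
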
diff --git a/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/modules/rtp_rtcp/source/rtp_fec_unittest.cc
index fa847625..9d19fde1 100644
--- a/modules/rtp_rtcp/source/rtp_fec_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_fec_unittest.cc
@@ -155,6 +155,174 @@ TEST_F(RtpFecTest, FecRecoveryWithLoss) {
EXPECT_FALSE(IsRecoveryComplete());
}
+// Verify that we don't use an old FEC packet for FEC decoding.
+TEST_F(RtpFecTest, FecRecoveryWithSeqNumGapTwoFrames) {
+ const int kNumImportantPackets = 0;
+ const bool kUseUnequalProtection = false;
+ uint8_t kProtectionFactor = 20;
+
+ // Two frames: first frame (old) with two media packets and 1 FEC packet.
+ // Second frame (new) with 3 media packets, and no FEC packets.
+ // ---Frame 1---- ----Frame 2------
+ // #0(media) #1(media) #2(FEC) #65535(media) #0(media) #1(media).
+ // If we lose either packet 0 or 1 of second frame, FEC decoding should not
+ // try to decode using "old" FEC packet #2.
+
+ // Construct media packets for first frame, starting at sequence number 0.
+ fec_seq_num_ = ConstructMediaPacketsSeqNum(2, 0);
+
+ EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
+ kNumImportantPackets, kUseUnequalProtection,
+ webrtc::kFecMaskBursty, &fec_packet_list_));
+ // Expect 1 FEC packet.
+ EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+ // Add FEC packet (seq#2) of this first frame to received list (i.e., assume
+ // the two media packets were lost).
+ memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+ ReceivedPackets(fec_packet_list_, fec_loss_mask_, true);
+
+ // Construct media packets for second frame, with sequence number wrap.
+ ClearList(&media_packet_list_);
+ fec_seq_num_ = ConstructMediaPacketsSeqNum(3, 65535);
+
+ // Expect 3 media packets for this frame.
+ EXPECT_EQ(3, static_cast<int>(media_packet_list_.size()));
+
+ // Second media packet lost (seq#0).
+ memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+ media_loss_mask_[1] = 1;
+ // Add packets #65535 and #1 to the received list.
+ ReceivedPackets(media_packet_list_, media_loss_mask_, false);
+
+ EXPECT_EQ(0,
+ fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_));
+
+ // Expect that no decoding is done to recover the missing packet (seq#0) of the
+ // second frame using the old FEC packet (seq#2) from the first (old) frame. So
+ // the number of recovered packets is 2, not the number of media packets (=3).
+ EXPECT_EQ(2, static_cast<int>(recovered_packet_list_.size()));
+ EXPECT_TRUE(recovered_packet_list_.size() != media_packet_list_.size());
+ FreeRecoveredPacketList();
+}
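
These new tests hinge on how 16-bit sequence numbers are ordered across a wrap. A minimal sketch of the usual half-range convention (illustrative, not the decoder's actual helper):

    #include <cstdint>

    // Returns true if |seq_a| is newer than |seq_b|, treating a forward distance
    // of less than half the 16-bit sequence space as "newer".
    bool IsNewerSequenceNumber(uint16_t seq_a, uint16_t seq_b) {
      return seq_a != seq_b && static_cast<uint16_t>(seq_a - seq_b) < 0x8000;
    }

    // Examples:
    //   IsNewerSequenceNumber(0, 65535) == true   // just after a wrap
    //   IsNewerSequenceNumber(10, 5)    == true
    //   IsNewerSequenceNumber(5, 10)    == false
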
+
+// Verify that we can still recover the frame if the sequence number wrap occurs
+// within the frame and the FEC packet following the wrap is received after the
+// media packets.
+TEST_F(RtpFecTest, FecRecoveryWithSeqNumGapOneFrameRecovery) {
+ const int kNumImportantPackets = 0;
+ const bool kUseUnequalProtection = false;
+ uint8_t kProtectionFactor = 20;
+
+ // One frame, with sequence number wrap in media packets.
+ // -----Frame 1----
+ // #65534(media) #65535(media) #0(media) #1(FEC).
+ fec_seq_num_ = ConstructMediaPacketsSeqNum(3, 65534);
+
+ EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
+ kNumImportantPackets, kUseUnequalProtection,
+ webrtc::kFecMaskBursty, &fec_packet_list_));
+
+ // Expect 1 FEC packet.
+ EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+ // Lose one media packet (seq# 65535).
+ memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+ memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+ media_loss_mask_[1] = 1;
+ ReceivedPackets(media_packet_list_, media_loss_mask_, false);
+ // Add FEC packet to received list following the media packets.
+ ReceivedPackets(fec_packet_list_, fec_loss_mask_, true);
+
+ EXPECT_EQ(0,
+ fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_));
+
+ // Expect 3 media packets in the recovered list, and complete recovery.
+ // The wrap-around won't remove the FEC packet, as it follows the wrap.
+ EXPECT_EQ(3, static_cast<int>(recovered_packet_list_.size()));
+ EXPECT_TRUE(IsRecoveryComplete());
+ FreeRecoveredPacketList();
+}
+
+// Sequence number wrap occurs within the FEC packets for the frame.
+// In this case we will discard the FEC packet, and full recovery is not expected.
+// The same problem occurs if the wrap is within the media packets but the FEC
+// packet is received before the media packets. This may be improved if timing
+// information is used to detect old FEC packets.
+// TODO(marpan): Update test if wrap-around handling changes in FEC decoding.
+TEST_F(RtpFecTest, FecRecoveryWithSeqNumGapOneFrameNoRecovery) {
+ const int kNumImportantPackets = 0;
+ const bool kUseUnequalProtection = false;
+ uint8_t kProtectionFactor = 200;
+
+ // 1 frame: 3 media packets and 2 FEC packets.
+ // Sequence number wrap in FEC packets.
+ // -----Frame 1----
+ // #65532(media) #65533(media) #65534(media) #65535(FEC) #0(FEC).
+ fec_seq_num_ = ConstructMediaPacketsSeqNum(3, 65532);
+
+ EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
+ kNumImportantPackets, kUseUnequalProtection,
+ webrtc::kFecMaskBursty, &fec_packet_list_));
+
+ // Expect 2 FEC packets.
+ EXPECT_EQ(2, static_cast<int>(fec_packet_list_.size()));
+
+ // Lose the last two media packets (seq# 65533, 65534).
+ memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+ memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+ media_loss_mask_[1] = 1;
+ media_loss_mask_[2] = 1;
+ ReceivedPackets(media_packet_list_, media_loss_mask_, false);
+ ReceivedPackets(fec_packet_list_, fec_loss_mask_, true);
+
+ EXPECT_EQ(0,
+ fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_));
+
+ // The two FEC packets are received and should allow for complete recovery,
+ // but because of the wrap the second FEC packet will be discarded, and only
+ // one media packet is recoverable. So expect 2 media packets in the recovered
+ // list and no complete recovery.
+ EXPECT_EQ(2, static_cast<int>(recovered_packet_list_.size()));
+ EXPECT_TRUE(recovered_packet_list_.size() != media_packet_list_.size());
+ EXPECT_FALSE(IsRecoveryComplete());
+ FreeRecoveredPacketList();
+}
+
+// Verify that we can still recover the frame if the FEC packet is received
+// before the media packets.
+TEST_F(RtpFecTest, FecRecoveryWithFecOutOfOrder) {
+ const int kNumImportantPackets = 0;
+ const bool kUseUnequalProtection = false;
+ uint8_t kProtectionFactor = 20;
+
+ // One frame: 3 media packets, 1 FEC packet.
+ // -----Frame 1----
+ // #0(media) #1(media) #2(media) #3(FEC).
+ fec_seq_num_ = ConstructMediaPacketsSeqNum(3, 0);
+
+ EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
+ kNumImportantPackets, kUseUnequalProtection,
+ webrtc::kFecMaskBursty, &fec_packet_list_));
+
+ // Expect 1 FEC packet.
+ EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+ // Lose one media packet (seq# 1).
+ memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+ memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+ media_loss_mask_[1] = 1;
+ // Add FEC packet to received list before the media packets.
+ ReceivedPackets(fec_packet_list_, fec_loss_mask_, true);
+ // Add media packets to received list.
+ ReceivedPackets(media_packet_list_, media_loss_mask_, false);
+
+ EXPECT_EQ(0,
+ fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_));
+
+ // Expect 3 media packets in recovered list, and complete recovery.
+ EXPECT_EQ(3, static_cast<int>(recovered_packet_list_.size()));
+ EXPECT_TRUE(IsRecoveryComplete());
+ FreeRecoveredPacketList();
+}
+
// Test 50% protection with random mask type: Two cases are considered:
// a 50% non-consecutive loss which can be fully recovered, and a 50%
// consecutive loss which cannot be fully recovered.
@@ -625,8 +793,6 @@ TEST_F(RtpFecTest, FecRecoveryNonConsecutivePacketsWrap) {
EXPECT_FALSE(IsRecoveryComplete());
}
-// TODO(marpan): Add more test cases.
-
void RtpFecTest::TearDown() {
fec_->ResetState(&recovered_packet_list_);
delete fec_;
@@ -700,7 +866,7 @@ void RtpFecTest::ReceivedPackets(const PacketList& packet_list, int* loss_mask,
// For media packets, the sequence number and marker bit is
// obtained from RTP header. These were set in ConstructMediaPackets().
received_packet->seq_num =
- webrtc::ModuleRTPUtility::BufferToUWord16(&packet->data[2]);
+ webrtc::RtpUtility::BufferToUWord16(&packet->data[2]);
} else {
// The sequence number, marker bit, and ssrc number are defined in the
// RTP header of the FEC packet, which is not constructed in this test.
@@ -755,12 +921,11 @@ int RtpFecTest::ConstructMediaPacketsSeqNum(int num_media_packets,
// Only push one (fake) frame to the FEC.
media_packet->data[1] &= 0x7f;
- webrtc::ModuleRTPUtility::AssignUWord16ToBuffer(&media_packet->data[2],
- sequence_number);
- webrtc::ModuleRTPUtility::AssignUWord32ToBuffer(&media_packet->data[4],
- time_stamp);
- webrtc::ModuleRTPUtility::AssignUWord32ToBuffer(&media_packet->data[8],
- ssrc_);
+ webrtc::RtpUtility::AssignUWord16ToBuffer(&media_packet->data[2],
+ sequence_number);
+ webrtc::RtpUtility::AssignUWord32ToBuffer(&media_packet->data[4],
+ time_stamp);
+ webrtc::RtpUtility::AssignUWord32ToBuffer(&media_packet->data[8], ssrc_);
// Generate random values for payload.
for (int j = 12; j < media_packet->length; ++j) {
diff --git a/modules/rtp_rtcp/source/rtp_header_parser.cc b/modules/rtp_rtcp/source/rtp_header_parser.cc
index bb24d4db..3fc26663 100644
--- a/modules/rtp_rtcp/source/rtp_header_parser.cc
+++ b/modules/rtp_rtcp/source/rtp_header_parser.cc
@@ -21,7 +21,8 @@ class RtpHeaderParserImpl : public RtpHeaderParser {
RtpHeaderParserImpl();
virtual ~RtpHeaderParserImpl() {}
- virtual bool Parse(const uint8_t* packet, int length,
+ virtual bool Parse(const uint8_t* packet,
+ size_t length,
RTPHeader* header) const OVERRIDE;
virtual bool RegisterRtpHeaderExtension(RTPExtensionType type,
@@ -31,7 +32,7 @@ class RtpHeaderParserImpl : public RtpHeaderParser {
private:
scoped_ptr<CriticalSectionWrapper> critical_section_;
- RtpHeaderExtensionMap rtp_header_extension_map_;
+ RtpHeaderExtensionMap rtp_header_extension_map_ GUARDED_BY(critical_section_);
};
RtpHeaderParser* RtpHeaderParser::Create() {
@@ -41,14 +42,15 @@ RtpHeaderParser* RtpHeaderParser::Create() {
RtpHeaderParserImpl::RtpHeaderParserImpl()
: critical_section_(CriticalSectionWrapper::CreateCriticalSection()) {}
-bool RtpHeaderParser::IsRtcp(const uint8_t* packet, int length) {
- ModuleRTPUtility::RTPHeaderParser rtp_parser(packet, length);
+bool RtpHeaderParser::IsRtcp(const uint8_t* packet, size_t length) {
+ RtpUtility::RtpHeaderParser rtp_parser(packet, length);
return rtp_parser.RTCP();
}
-bool RtpHeaderParserImpl::Parse(const uint8_t* packet, int length,
- RTPHeader* header) const {
- ModuleRTPUtility::RTPHeaderParser rtp_parser(packet, length);
+bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
+ size_t length,
+ RTPHeader* header) const {
+ RtpUtility::RtpHeaderParser rtp_parser(packet, length);
memset(header, 0, sizeof(*header));
RtpHeaderExtensionMap map;
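
IsRtcp() decides whether an incoming packet should be demultiplexed as RTCP before any RTP parsing is attempted. A common heuristic in the spirit of RFC 5761, shown here only as an illustration of the idea (the real check lives in RtpUtility::RtpHeaderParser::RTCP()):

    #include <cstddef>
    #include <cstdint>

    // Treat the packet as RTCP if the version bits are valid and the second
    // byte falls in the RTCP packet-type range 192-223, which overlaps RTP
    // payload types 64-95 once the marker bit is masked off.
    bool LooksLikeRtcp(const uint8_t* packet, size_t length) {
      if (length < 2)
        return false;
      if ((packet[0] >> 6) != 2)  // RTP/RTCP version must be 2.
        return false;
      const uint8_t packet_type = packet[1];
      return packet_type >= 192 && packet_type <= 223;
    }
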
diff --git a/modules/rtp_rtcp/source/rtp_payload_registry.cc b/modules/rtp_rtcp/source/rtp_payload_registry.cc
index db2e4cd3..ec05a73b 100644
--- a/modules/rtp_rtcp/source/rtp_payload_registry.cc
+++ b/modules/rtp_rtcp/source/rtp_payload_registry.cc
@@ -29,7 +29,7 @@ RTPPayloadRegistry::RTPPayloadRegistry(
RTPPayloadRegistry::~RTPPayloadRegistry() {
while (!payload_type_map_.empty()) {
- ModuleRTPUtility::PayloadTypeMap::iterator it = payload_type_map_.begin();
+ RtpUtility::PayloadTypeMap::iterator it = payload_type_map_.begin();
delete it->second;
payload_type_map_.erase(it);
}
@@ -69,12 +69,12 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
CriticalSectionScoped cs(crit_sect_.get());
- ModuleRTPUtility::PayloadTypeMap::iterator it =
- payload_type_map_.find(payload_type);
+ RtpUtility::PayloadTypeMap::iterator it =
+ payload_type_map_.find(payload_type);
if (it != payload_type_map_.end()) {
// We already use this payload type.
- ModuleRTPUtility::Payload* payload = it->second;
+ RtpUtility::Payload* payload = it->second;
assert(payload);
@@ -83,7 +83,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
// Check if it's the same as we already have.
// If same, ignore sending an error.
if (payload_name_length == name_length &&
- ModuleRTPUtility::StringCompare(
+ RtpUtility::StringCompare(
payload->name, payload_name, payload_name_length)) {
if (rtp_payload_strategy_->PayloadIsCompatible(*payload, frequency,
channels, rate)) {
@@ -100,18 +100,18 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
payload_name, payload_name_length, frequency, channels, rate);
}
- ModuleRTPUtility::Payload* payload = NULL;
+ RtpUtility::Payload* payload = NULL;
// Save the RED payload type. Used in both audio and video.
- if (ModuleRTPUtility::StringCompare(payload_name, "red", 3)) {
+ if (RtpUtility::StringCompare(payload_name, "red", 3)) {
red_payload_type_ = payload_type;
- payload = new ModuleRTPUtility::Payload;
+ payload = new RtpUtility::Payload;
memset(payload, 0, sizeof(*payload));
payload->audio = false;
strncpy(payload->name, payload_name, RTP_PAYLOAD_NAME_SIZE - 1);
- } else if (ModuleRTPUtility::StringCompare(payload_name, "ulpfec", 3)) {
+ } else if (RtpUtility::StringCompare(payload_name, "ulpfec", 3)) {
ulpfec_payload_type_ = payload_type;
- payload = new ModuleRTPUtility::Payload;
+ payload = new RtpUtility::Payload;
memset(payload, 0, sizeof(*payload));
payload->audio = false;
strncpy(payload->name, payload_name, RTP_PAYLOAD_NAME_SIZE - 1);
@@ -132,7 +132,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
int32_t RTPPayloadRegistry::DeRegisterReceivePayload(
const int8_t payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
- ModuleRTPUtility::PayloadTypeMap::iterator it =
+ RtpUtility::PayloadTypeMap::iterator it =
payload_type_map_.find(payload_type);
assert(it != payload_type_map_.end());
delete it->second;
@@ -149,15 +149,14 @@ void RTPPayloadRegistry::DeregisterAudioCodecOrRedTypeRegardlessOfPayloadType(
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) {
- ModuleRTPUtility::PayloadTypeMap::iterator iterator =
- payload_type_map_.begin();
+ RtpUtility::PayloadTypeMap::iterator iterator = payload_type_map_.begin();
for (; iterator != payload_type_map_.end(); ++iterator) {
- ModuleRTPUtility::Payload* payload = iterator->second;
+ RtpUtility::Payload* payload = iterator->second;
size_t name_length = strlen(payload->name);
- if (payload_name_length == name_length
- && ModuleRTPUtility::StringCompare(payload->name, payload_name,
- payload_name_length)) {
+ if (payload_name_length == name_length &&
+ RtpUtility::StringCompare(
+ payload->name, payload_name, payload_name_length)) {
// We found the payload name in the list.
// If audio, check frequency and rate.
if (payload->audio) {
@@ -168,7 +167,7 @@ void RTPPayloadRegistry::DeregisterAudioCodecOrRedTypeRegardlessOfPayloadType(
payload_type_map_.erase(iterator);
break;
}
- } else if (ModuleRTPUtility::StringCompare(payload_name, "red", 3)) {
+ } else if (RtpUtility::StringCompare(payload_name, "red", 3)) {
delete payload;
payload_type_map_.erase(iterator);
break;
@@ -188,16 +187,15 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
CriticalSectionScoped cs(crit_sect_.get());
- ModuleRTPUtility::PayloadTypeMap::const_iterator it =
- payload_type_map_.begin();
+ RtpUtility::PayloadTypeMap::const_iterator it = payload_type_map_.begin();
for (; it != payload_type_map_.end(); ++it) {
- ModuleRTPUtility::Payload* payload = it->second;
+ RtpUtility::Payload* payload = it->second;
assert(payload);
size_t name_length = strlen(payload->name);
if (payload_name_length == name_length &&
- ModuleRTPUtility::StringCompare(
+ RtpUtility::StringCompare(
payload->name, payload_name, payload_name_length)) {
// Name matches.
if (payload->audio) {
@@ -261,9 +259,9 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t** restored_packet,
*packet_length -= kRtxHeaderSize;
// Replace the SSRC and the sequence number with the originals.
- ModuleRTPUtility::AssignUWord16ToBuffer(*restored_packet + 2,
- original_sequence_number);
- ModuleRTPUtility::AssignUWord32ToBuffer(*restored_packet + 8, original_ssrc);
+ RtpUtility::AssignUWord16ToBuffer(*restored_packet + 2,
+ original_sequence_number);
+ RtpUtility::AssignUWord32ToBuffer(*restored_packet + 8, original_ssrc);
CriticalSectionScoped cs(crit_sect_.get());
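
RestoreOriginalPacket() undoes RFC 4588 RTX encapsulation: the 2-byte original sequence number (OSN) inserted after the RTP header is stripped, and the header's sequence number and SSRC are rewritten with the original stream's values, as the two Assign calls above show. A simplified stand-alone sketch, assuming a fixed 12-byte header and no extensions:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    bool RestoreFromRtx(uint8_t* packet, size_t* length,
                        uint32_t original_ssrc, uint8_t original_payload_type) {
      const size_t kHeaderLen = 12;
      const size_t kRtxHeaderSize = 2;
      if (*length < kHeaderLen + kRtxHeaderSize)
        return false;
      // The OSN is carried big-endian right after the fixed header.
      const uint16_t original_seq =
          (packet[kHeaderLen] << 8) | packet[kHeaderLen + 1];
      // Close the 2-byte gap left by removing the OSN.
      memmove(packet + kHeaderLen, packet + kHeaderLen + kRtxHeaderSize,
              *length - kHeaderLen - kRtxHeaderSize);
      *length -= kRtxHeaderSize;
      // Rewrite payload type, sequence number and SSRC with the originals.
      packet[1] = (packet[1] & 0x80) | (original_payload_type & 0x7f);
      packet[2] = original_seq >> 8;
      packet[3] = original_seq & 0xff;
      packet[8] = original_ssrc >> 24;
      packet[9] = (original_ssrc >> 16) & 0xff;
      packet[10] = (original_ssrc >> 8) & 0xff;
      packet[11] = original_ssrc & 0xff;
      return true;
    }
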
@@ -307,8 +305,8 @@ bool RTPPayloadRegistry::IsEncapsulated(const RTPHeader& header) const {
bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
PayloadUnion* payload) const {
CriticalSectionScoped cs(crit_sect_.get());
- ModuleRTPUtility::PayloadTypeMap::const_iterator it =
- payload_type_map_.find(payload_type);
+ RtpUtility::PayloadTypeMap::const_iterator it =
+ payload_type_map_.find(payload_type);
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
@@ -320,7 +318,7 @@ bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
int RTPPayloadRegistry::GetPayloadTypeFrequency(
uint8_t payload_type) const {
- ModuleRTPUtility::Payload* payload;
+ RtpUtility::Payload* payload;
if (!PayloadTypeToPayload(payload_type, payload)) {
return -1;
}
@@ -329,12 +327,12 @@ int RTPPayloadRegistry::GetPayloadTypeFrequency(
}
bool RTPPayloadRegistry::PayloadTypeToPayload(
- const uint8_t payload_type,
- ModuleRTPUtility::Payload*& payload) const {
+ const uint8_t payload_type,
+ RtpUtility::Payload*& payload) const {
CriticalSectionScoped cs(crit_sect_.get());
- ModuleRTPUtility::PayloadTypeMap::const_iterator it =
- payload_type_map_.find(payload_type);
+ RtpUtility::PayloadTypeMap::const_iterator it =
+ payload_type_map_.find(payload_type);
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
@@ -365,11 +363,10 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
public:
virtual bool CodecsMustBeUnique() const OVERRIDE { return true; }
- virtual bool PayloadIsCompatible(
- const ModuleRTPUtility::Payload& payload,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) const OVERRIDE {
+ virtual bool PayloadIsCompatible(const RtpUtility::Payload& payload,
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate) const OVERRIDE {
return
payload.audio &&
payload.typeSpecific.Audio.frequency == frequency &&
@@ -378,19 +375,18 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
payload.typeSpecific.Audio.rate == 0 || rate == 0);
}
- virtual void UpdatePayloadRate(
- ModuleRTPUtility::Payload* payload,
- const uint32_t rate) const OVERRIDE {
+ virtual void UpdatePayloadRate(RtpUtility::Payload* payload,
+ const uint32_t rate) const OVERRIDE {
payload->typeSpecific.Audio.rate = rate;
}
- virtual ModuleRTPUtility::Payload* CreatePayloadType(
+ virtual RtpUtility::Payload* CreatePayloadType(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) const OVERRIDE {
- ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
+ RtpUtility::Payload* payload = new RtpUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
assert(frequency >= 1000);
@@ -401,8 +397,7 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
return payload;
}
- int GetPayloadTypeFrequency(
- const ModuleRTPUtility::Payload& payload) const {
+ int GetPayloadTypeFrequency(const RtpUtility::Payload& payload) const {
return payload.typeSpecific.Audio.frequency;
}
};
@@ -411,37 +406,37 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
public:
virtual bool CodecsMustBeUnique() const OVERRIDE { return false; }
- virtual bool PayloadIsCompatible(
- const ModuleRTPUtility::Payload& payload,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) const OVERRIDE {
+ virtual bool PayloadIsCompatible(const RtpUtility::Payload& payload,
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate) const OVERRIDE {
return !payload.audio;
}
- virtual void UpdatePayloadRate(
- ModuleRTPUtility::Payload* payload,
- const uint32_t rate) const OVERRIDE {
+ virtual void UpdatePayloadRate(RtpUtility::Payload* payload,
+ const uint32_t rate) const OVERRIDE {
payload->typeSpecific.Video.maxRate = rate;
}
- virtual ModuleRTPUtility::Payload* CreatePayloadType(
+ virtual RtpUtility::Payload* CreatePayloadType(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate) const OVERRIDE {
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
- if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
+ if (RtpUtility::StringCompare(payloadName, "VP8", 3)) {
videoType = kRtpVideoVp8;
- } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
+ } else if (RtpUtility::StringCompare(payloadName, "H264", 4)) {
+ videoType = kRtpVideoH264;
+ } else if (RtpUtility::StringCompare(payloadName, "I420", 4)) {
videoType = kRtpVideoGeneric;
- } else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6)) {
+ } else if (RtpUtility::StringCompare(payloadName, "ULPFEC", 6)) {
videoType = kRtpVideoNone;
} else {
videoType = kRtpVideoGeneric;
}
- ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
+ RtpUtility::Payload* payload = new RtpUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
@@ -451,8 +446,7 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
return payload;
}
- int GetPayloadTypeFrequency(
- const ModuleRTPUtility::Payload& payload) const {
+ int GetPayloadTypeFrequency(const RtpUtility::Payload& payload) const {
return kVideoPayloadTypeFrequency;
}
};
diff --git a/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
index c03ffcd1..2dacbdd1 100644
--- a/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
@@ -36,18 +36,19 @@ class RtpPayloadRegistryTest : public ::testing::Test {
}
protected:
- ModuleRTPUtility::Payload* ExpectReturnOfTypicalAudioPayload(
- uint8_t payload_type, uint32_t rate) {
+ RtpUtility::Payload* ExpectReturnOfTypicalAudioPayload(uint8_t payload_type,
+ uint32_t rate) {
bool audio = true;
- ModuleRTPUtility::Payload returned_payload = { "name", audio, {
- // Initialize the audio struct in this case.
- { kTypicalFrequency, kTypicalChannels, rate }
- }};
+ RtpUtility::Payload returned_payload = {
+ "name",
+ audio,
+ {// Initialize the audio struct in this case.
+ {kTypicalFrequency, kTypicalChannels, rate}}};
// Note: we return a new payload since the payload registry takes ownership
// of the created object.
- ModuleRTPUtility::Payload* returned_payload_on_heap =
- new ModuleRTPUtility::Payload(returned_payload);
+ RtpUtility::Payload* returned_payload_on_heap =
+ new RtpUtility::Payload(returned_payload);
EXPECT_CALL(*mock_payload_strategy_,
CreatePayloadType(kTypicalPayloadName, payload_type,
kTypicalFrequency,
@@ -62,7 +63,7 @@ class RtpPayloadRegistryTest : public ::testing::Test {
TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
uint8_t payload_type = 97;
- ModuleRTPUtility::Payload* returned_payload_on_heap =
+ RtpUtility::Payload* returned_payload_on_heap =
ExpectReturnOfTypicalAudioPayload(payload_type, kTypicalRate);
bool new_payload_created = false;
@@ -72,7 +73,7 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
EXPECT_TRUE(new_payload_created) << "A new payload WAS created.";
- ModuleRTPUtility::Payload* retrieved_payload = NULL;
+ RtpUtility::Payload* retrieved_payload = NULL;
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
@@ -99,7 +100,7 @@ TEST_F(RtpPayloadRegistryTest, DoesNotCreateNewPayloadTypeIfRed) {
ASSERT_EQ(red_type_of_the_day, rtp_payload_registry_->red_payload_type());
- ModuleRTPUtility::Payload* retrieved_payload = NULL;
+ RtpUtility::Payload* retrieved_payload = NULL;
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(red_type_of_the_day,
retrieved_payload));
EXPECT_FALSE(retrieved_payload->audio);
@@ -111,7 +112,7 @@ TEST_F(RtpPayloadRegistryTest,
uint8_t payload_type = 97;
bool ignored = false;
- ModuleRTPUtility::Payload* first_payload_on_heap =
+ RtpUtility::Payload* first_payload_on_heap =
ExpectReturnOfTypicalAudioPayload(payload_type, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
@@ -121,7 +122,7 @@ TEST_F(RtpPayloadRegistryTest,
kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
kTypicalRate, &ignored)) << "Adding same codec twice = bad.";
- ModuleRTPUtility::Payload* second_payload_on_heap =
+ RtpUtility::Payload* second_payload_on_heap =
ExpectReturnOfTypicalAudioPayload(payload_type - 1, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
kTypicalPayloadName, payload_type - 1, kTypicalFrequency,
@@ -129,7 +130,7 @@ TEST_F(RtpPayloadRegistryTest,
"With a different payload type is fine though.";
// Ensure both payloads are preserved.
- ModuleRTPUtility::Payload* retrieved_payload = NULL;
+ RtpUtility::Payload* retrieved_payload = NULL;
EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
retrieved_payload));
EXPECT_EQ(first_payload_on_heap, retrieved_payload);
@@ -168,7 +169,7 @@ TEST_F(RtpPayloadRegistryTest,
kTypicalPayloadName, payload_type - 1, kTypicalFrequency,
kTypicalChannels, kTypicalRate, &ignored));
- ModuleRTPUtility::Payload* retrieved_payload = NULL;
+ RtpUtility::Payload* retrieved_payload = NULL;
EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
payload_type, retrieved_payload)) << "The first payload should be "
"deregistered because the only thing that differs is payload type.";
diff --git a/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/modules/rtp_rtcp/source/rtp_receiver_audio.cc
index c8104cc3..05eefbe0 100644
--- a/modules/rtp_rtcp/source/rtp_receiver_audio.cc
+++ b/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -159,10 +159,10 @@ int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
uint32_t frequency) {
CriticalSectionScoped lock(crit_sect_.get());
- if (ModuleRTPUtility::StringCompare(payload_name, "telephone-event", 15)) {
+ if (RtpUtility::StringCompare(payload_name, "telephone-event", 15)) {
telephone_event_payload_type_ = payload_type;
}
- if (ModuleRTPUtility::StringCompare(payload_name, "cn", 2)) {
+ if (RtpUtility::StringCompare(payload_name, "cn", 2)) {
// we can have three CNG on 8000Hz, 16000Hz and 32000Hz
if (frequency == 8000) {
cng_nb_payload_type_ = payload_type;
diff --git a/modules/rtp_rtcp/source/rtp_receiver_audio.h b/modules/rtp_rtcp/source/rtp_receiver_audio.h
index 0ffd4bf4..4fb7256d 100644
--- a/modules/rtp_rtcp/source/rtp_receiver_audio.h
+++ b/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -83,7 +83,7 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
// We do not allow codecs to have multiple payload types for audio, so we
// need to override the default behavior (which is to do nothing).
void PossiblyRemoveExistingPayloadType(
- ModuleRTPUtility::PayloadTypeMap* payload_type_map,
+ RtpUtility::PayloadTypeMap* payload_type_map,
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
size_t payload_name_length,
uint32_t frequency,
diff --git a/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index d92618f2..7493488d 100644
--- a/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -22,10 +22,10 @@
namespace webrtc {
-using ModuleRTPUtility::GetCurrentRTP;
-using ModuleRTPUtility::Payload;
-using ModuleRTPUtility::RTPPayloadParser;
-using ModuleRTPUtility::StringCompare;
+using RtpUtility::GetCurrentRTP;
+using RtpUtility::Payload;
+using RtpUtility::RTPPayloadParser;
+using RtpUtility::StringCompare;
RtpReceiver* RtpReceiver::CreateVideoReceiver(
int id, Clock* clock,
diff --git a/modules/rtp_rtcp/source/rtp_receiver_video.cc b/modules/rtp_rtcp/source/rtp_receiver_video.cc
index 5bb519f6..c058ed5d 100644
--- a/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -113,6 +113,8 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
return ReceiveGenericCodec(rtp_header, payload_data, payload_data_length);
case kRtpVideoVp8:
return ReceiveVp8Codec(rtp_header, payload_data, payload_data_length);
+ case kRtpVideoH264:
+ assert(false); // Not yet supported.
case kRtpVideoNone:
break;
}
@@ -127,12 +129,11 @@ int32_t RTPReceiverVideo::BuildRTPheader(
if (rtp_header->header.markerBit) {
data_buffer[1] |= kRtpMarkerBitMask; // MarkerBit is 1
}
- ModuleRTPUtility::AssignUWord16ToBuffer(data_buffer + 2,
- rtp_header->header.sequenceNumber);
- ModuleRTPUtility::AssignUWord32ToBuffer(data_buffer + 4,
- rtp_header->header.timestamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(data_buffer + 8,
- rtp_header->header.ssrc);
+ RtpUtility::AssignUWord16ToBuffer(data_buffer + 2,
+ rtp_header->header.sequenceNumber);
+ RtpUtility::AssignUWord32ToBuffer(data_buffer + 4,
+ rtp_header->header.timestamp);
+ RtpUtility::AssignUWord32ToBuffer(data_buffer + 8, rtp_header->header.ssrc);
int32_t rtp_header_length = 12;
@@ -144,8 +145,7 @@ int32_t RTPReceiverVideo::BuildRTPheader(
}
uint8_t* ptr = &data_buffer[rtp_header_length];
for (uint32_t i = 0; i < rtp_header->header.numCSRCs; ++i) {
- ModuleRTPUtility::AssignUWord32ToBuffer(ptr,
- rtp_header->header.arrOfCSRCs[i]);
+ RtpUtility::AssignUWord32ToBuffer(ptr, rtp_header->header.arrOfCSRCs[i]);
ptr += 4;
}
data_buffer[0] = (data_buffer[0] & 0xf0) | rtp_header->header.numCSRCs;
@@ -158,8 +158,8 @@ int32_t RTPReceiverVideo::BuildRTPheader(
int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
- ModuleRTPUtility::RTPPayload parsed_packet;
- ModuleRTPUtility::RTPPayloadParser rtp_payload_parser(
+ RtpUtility::RTPPayload parsed_packet;
+ RtpUtility::RTPPayloadParser rtp_payload_parser(
kRtpVideoVp8, payload_data, payload_data_length);
if (!rtp_payload_parser.Parse(parsed_packet))
@@ -168,11 +168,12 @@ int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
if (parsed_packet.info.VP8.dataLength == 0)
return 0;
- rtp_header->frameType = (parsed_packet.frameType == ModuleRTPUtility::kIFrame)
- ? kVideoFrameKey : kVideoFrameDelta;
+ rtp_header->frameType = (parsed_packet.frameType == RtpUtility::kIFrame)
+ ? kVideoFrameKey
+ : kVideoFrameDelta;
RTPVideoHeaderVP8* to_header = &rtp_header->type.Video.codecHeader.VP8;
- ModuleRTPUtility::RTPPayloadVP8* from_header = &parsed_packet.info.VP8;
+ RtpUtility::RTPPayloadVP8* from_header = &parsed_packet.info.VP8;
rtp_header->type.Video.isFirstPacket =
from_header->beginningOfPartition && (from_header->partitionID == 0);
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 70fe7174..855d51b7 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -37,7 +37,8 @@ RtpRtcp::Configuration::Configuration()
rtt_stats(NULL),
audio_messages(NullObjectRtpAudioFeedback()),
remote_bitrate_estimator(NULL),
- paced_sender(NULL) {
+ paced_sender(NULL),
+ send_bitrate_observer(NULL) {
}
RtpRtcp* RtpRtcp::CreateRtpRtcp(const RtpRtcp::Configuration& configuration) {
@@ -60,7 +61,8 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
configuration.clock,
configuration.outgoing_transport,
configuration.audio_messages,
- configuration.paced_sender),
+ configuration.paced_sender,
+ configuration.send_bitrate_observer),
rtcp_sender_(configuration.id,
configuration.audio,
configuration.clock,
@@ -333,6 +335,42 @@ int32_t ModuleRtpRtcpImpl::SetSequenceNumber(
return 0; // TODO(pwestin): change to void.
}
+void ModuleRtpRtcpImpl::SetRtpStateForSsrc(uint32_t ssrc,
+ const RtpState& rtp_state) {
+ if (rtp_sender_.SSRC() == ssrc) {
+ rtp_sender_.SetRtpState(rtp_state);
+ return;
+ }
+ if (rtp_sender_.RtxSsrc() == ssrc) {
+ rtp_sender_.SetRtxRtpState(rtp_state);
+ return;
+ }
+
+ CriticalSectionScoped lock(critical_section_module_ptrs_.get());
+ for (size_t i = 0; i < child_modules_.size(); ++i) {
+ child_modules_[i]->SetRtpStateForSsrc(ssrc, rtp_state);
+ }
+}
+
+bool ModuleRtpRtcpImpl::GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) {
+ if (rtp_sender_.SSRC() == ssrc) {
+ *rtp_state = rtp_sender_.GetRtpState();
+ return true;
+ }
+
+ if (rtp_sender_.RtxSsrc() == ssrc) {
+ *rtp_state = rtp_sender_.GetRtxRtpState();
+ return true;
+ }
+
+ CriticalSectionScoped lock(critical_section_module_ptrs_.get());
+ for (size_t i = 0; i < child_modules_.size(); ++i) {
+ if (child_modules_[i]->GetRtpStateForSsrc(ssrc, rtp_state))
+ return true;
+ }
+ return false;
+}
+
uint32_t ModuleRtpRtcpImpl::SSRC() const {
return rtp_sender_.SSRC();
}
@@ -1198,16 +1236,6 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
*nack_rate = rtp_sender_.NackOverheadRate();
}
-void ModuleRtpRtcpImpl::RegisterVideoBitrateObserver(
- BitrateStatisticsObserver* observer) {
- assert(!IsDefaultModule());
- rtp_sender_.RegisterBitrateObserver(observer);
-}
-
-BitrateStatisticsObserver* ModuleRtpRtcpImpl::GetVideoBitrateObserver() const {
- return rtp_sender_.GetBitrateObserver();
-}
-
void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
RequestKeyFrame();
}
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 55826b6f..b65131fb 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -73,6 +73,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Set SequenceNumber, default is a random number.
virtual int32_t SetSequenceNumber(const uint16_t seq) OVERRIDE;
+ virtual void SetRtpStateForSsrc(uint32_t ssrc,
+ const RtpState& rtp_state) OVERRIDE;
+ virtual bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) OVERRIDE;
+
virtual uint32_t SSRC() const OVERRIDE;
// Configure SSRC, default is a random number.
@@ -340,11 +344,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
uint32_t* fec_rate,
uint32_t* nackRate) const OVERRIDE;
- virtual void RegisterVideoBitrateObserver(BitrateStatisticsObserver* observer)
- OVERRIDE;
-
- virtual BitrateStatisticsObserver* GetVideoBitrateObserver() const OVERRIDE;
-
virtual uint32_t SendTimeOfSendReport(const uint32_t send_report);
virtual bool SendTimeOfXrRrReport(uint32_t mid_ntp, int64_t* time_ms) const;
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index eb76cfe7..930778c3 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -232,8 +232,9 @@ class RtpSendingTestTransport : public Transport {
virtual int SendPacket(int channel, const void* data, int length) {
RTPHeader header;
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
- EXPECT_TRUE(
- parser->Parse(static_cast<const uint8_t*>(data), length, &header));
+ EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data),
+ static_cast<size_t>(length),
+ &header));
bytes_received_[header.ssrc] += length;
++packets_received_[header.ssrc];
return length;
diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc
index 74947693..858fc42a 100644
--- a/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/modules/rtp_rtcp/source/rtp_sender.cc
@@ -16,6 +16,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -44,7 +45,8 @@ RTPSender::RTPSender(const int32_t id,
Clock* clock,
Transport* transport,
RtpAudioFeedback* audio_feedback,
- PacedSender* paced_sender)
+ PacedSender* paced_sender,
+ BitrateStatisticsObserver* bitrate_callback)
: clock_(clock),
bitrate_sent_(clock, this),
id_(id),
@@ -71,10 +73,10 @@ RTPSender::RTPSender(const int32_t id,
statistics_crit_(CriticalSectionWrapper::CreateCriticalSection()),
frame_count_observer_(NULL),
rtp_stats_callback_(NULL),
- bitrate_callback_(NULL),
+ bitrate_callback_(bitrate_callback),
// RTP variables
- start_time_stamp_forced_(false),
- start_time_stamp_(0),
+ start_timestamp_forced_(false),
+ start_timestamp_(0),
ssrc_db_(*SSRCDatabase::GetSSRCDatabase()),
remote_ssrc_(0),
sequence_number_forced_(false),
@@ -118,7 +120,7 @@ RTPSender::~RTPSender() {
SSRCDatabase::ReturnSSRCDatabase();
delete send_critsect_;
while (!payload_type_map_.empty()) {
- std::map<int8_t, ModuleRTPUtility::Payload *>::iterator it =
+ std::map<int8_t, RtpUtility::Payload*>::iterator it =
payload_type_map_.begin();
delete it->second;
payload_type_map_.erase(it);
@@ -223,17 +225,17 @@ int32_t RTPSender::RegisterPayload(
assert(payload_name);
CriticalSectionScoped cs(send_critsect_);
- std::map<int8_t, ModuleRTPUtility::Payload *>::iterator it =
+ std::map<int8_t, RtpUtility::Payload*>::iterator it =
payload_type_map_.find(payload_number);
if (payload_type_map_.end() != it) {
// We already use this payload type.
- ModuleRTPUtility::Payload *payload = it->second;
+ RtpUtility::Payload* payload = it->second;
assert(payload);
// Check if it's the same as we already have.
- if (ModuleRTPUtility::StringCompare(payload->name, payload_name,
- RTP_PAYLOAD_NAME_SIZE - 1)) {
+ if (RtpUtility::StringCompare(
+ payload->name, payload_name, RTP_PAYLOAD_NAME_SIZE - 1)) {
if (audio_configured_ && payload->audio &&
payload->typeSpecific.Audio.frequency == frequency &&
(payload->typeSpecific.Audio.rate == rate ||
@@ -249,7 +251,7 @@ int32_t RTPSender::RegisterPayload(
return -1;
}
int32_t ret_val = -1;
- ModuleRTPUtility::Payload *payload = NULL;
+ RtpUtility::Payload* payload = NULL;
if (audio_configured_) {
ret_val = audio_->RegisterAudioPayload(payload_name, payload_number,
frequency, channels, rate, payload);
@@ -267,13 +269,13 @@ int32_t RTPSender::DeRegisterSendPayload(
const int8_t payload_type) {
CriticalSectionScoped lock(send_critsect_);
- std::map<int8_t, ModuleRTPUtility::Payload *>::iterator it =
+ std::map<int8_t, RtpUtility::Payload*>::iterator it =
payload_type_map_.find(payload_type);
if (payload_type_map_.end() == it) {
return -1;
}
- ModuleRTPUtility::Payload *payload = it->second;
+ RtpUtility::Payload* payload = it->second;
delete payload;
payload_type_map_.erase(it);
return 0;
@@ -303,12 +305,17 @@ int32_t RTPSender::SetMaxPayloadLength(
}
uint16_t RTPSender::MaxDataPayloadLength() const {
+ int rtx;
+ {
+ CriticalSectionScoped rtx_lock(send_critsect_);
+ rtx = rtx_;
+ }
if (audio_configured_) {
return max_payload_length_ - RTPHeaderLength();
} else {
return max_payload_length_ - RTPHeaderLength() // RTP overhead.
- video_->FECPacketOverhead() // FEC/ULP/RED overhead.
- - ((rtx_) ? 2 : 0); // RTX overhead.
+ - ((rtx) ? 2 : 0); // RTX overhead.
}
}
@@ -328,6 +335,11 @@ void RTPSender::SetRtxSsrc(uint32_t ssrc) {
ssrc_rtx_ = ssrc;
}
+uint32_t RTPSender::RtxSsrc() const {
+ CriticalSectionScoped cs(send_critsect_);
+ return ssrc_rtx_;
+}
+
void RTPSender::RTXStatus(int* mode, uint32_t* ssrc,
int* payload_type) const {
CriticalSectionScoped cs(send_critsect_);
@@ -365,14 +377,14 @@ int32_t RTPSender::CheckPayloadType(const int8_t payload_type,
}
return 0;
}
- std::map<int8_t, ModuleRTPUtility::Payload *>::iterator it =
+ std::map<int8_t, RtpUtility::Payload*>::iterator it =
payload_type_map_.find(payload_type);
if (it == payload_type_map_.end()) {
LOG(LS_WARNING) << "Payload type " << payload_type << " not registered.";
return -1;
}
payload_type_ = payload_type;
- ModuleRTPUtility::Payload *payload = it->second;
+ RtpUtility::Payload* payload = it->second;
assert(payload);
if (!payload->audio && !audio_configured_) {
video_->SetVideoCodecType(payload->typeSpecific.Video.videoCodecType);
@@ -388,9 +400,11 @@ int32_t RTPSender::SendOutgoingData(
const uint8_t *payload_data, const uint32_t payload_size,
const RTPFragmentationHeader *fragmentation,
VideoCodecInformation *codec_info, const RTPVideoTypeHeader *rtp_type_hdr) {
+ uint32_t ssrc;
{
// Drop this packet if we're not sending media packets.
CriticalSectionScoped cs(send_critsect_);
+ ssrc = ssrc_;
if (!sending_media_) {
return 0;
}
@@ -434,17 +448,13 @@ int32_t RTPSender::SendOutgoingData(
CriticalSectionScoped cs(statistics_crit_.get());
uint32_t frame_count = ++frame_counts_[frame_type];
if (frame_count_observer_) {
- frame_count_observer_->FrameCountUpdated(frame_type,
- frame_count,
- ssrc_);
+ frame_count_observer_->FrameCountUpdated(frame_type, frame_count, ssrc);
}
return ret_val;
}
int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) {
- if (!(rtx_ & kRtxRedundantPayloads))
- return 0;
uint8_t buffer[IP_PACKET_SIZE];
int bytes_left = bytes_to_send;
while (bytes_left > 0) {
@@ -456,7 +466,7 @@ int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) {
}
if (!PrepareAndSendPacket(buffer, length, capture_time_ms, true, false))
return -1;
- ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length);
+ RtpUtility::RtpHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
bytes_left -= length - rtp_header.headerLength;
@@ -492,7 +502,7 @@ bool RTPSender::SendPaddingAccordingToBitrate(
CriticalSectionScoped cs(send_critsect_);
// Add the random RTP timestamp offset and store the capture time for
// later calculation of the send time offset.
- timestamp = start_time_stamp_ + capture_timestamp;
+ timestamp = start_timestamp_ + capture_timestamp;
timestamp_ = timestamp;
capture_time_ms_ = capture_time_ms;
last_timestamp_time_ms_ = clock_->TimeInMilliseconds();
@@ -566,6 +576,7 @@ int RTPSender::SendPadData(int payload_type, uint32_t timestamp,
++sequence_number_rtx_;
}
}
+
uint8_t padding_packet[IP_PACKET_SIZE];
int header_length = CreateRTPHeader(padding_packet, payload_type, ssrc,
false, timestamp, sequence_number, NULL,
@@ -604,16 +615,21 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
}
if (paced_sender_) {
- ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length);
+ RtpUtility::RtpHeaderParser rtp_parser(data_buffer, length);
RTPHeader header;
if (!rtp_parser.Parse(header)) {
assert(false);
return -1;
}
+ // Convert from TickTime to Clock since capture_time_ms is based on
+ // TickTime.
+ // TODO(holmer): Remove this conversion when we remove the use of TickTime.
+ int64_t clock_delta_ms = clock_->TimeInMilliseconds() -
+ TickTime::MillisecondTimestamp();
if (!paced_sender_->SendPacket(PacedSender::kHighPriority,
header.ssrc,
header.sequenceNumber,
- capture_time_ms,
+ capture_time_ms + clock_delta_ms,
length - header.headerLength,
true)) {
// We can't send the packet right now.
@@ -622,6 +638,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
}
}
+ CriticalSectionScoped lock(send_critsect_);
return PrepareAndSendPacket(data_buffer, length, capture_time_ms,
(rtx_ & kRtxRetransmitted) > 0, true) ?
length : -1;
@@ -778,8 +795,15 @@ bool RTPSender::TimeToSendPacket(uint16_t sequence_number,
if (!retransmission && capture_time_ms > 0) {
UpdateDelayStatistics(capture_time_ms, clock_->TimeInMilliseconds());
}
- return PrepareAndSendPacket(data_buffer, length, capture_time_ms,
- retransmission && (rtx_ & kRtxRetransmitted) > 0,
+ int rtx;
+ {
+ CriticalSectionScoped lock(send_critsect_);
+ rtx = rtx_;
+ }
+ return PrepareAndSendPacket(data_buffer,
+ length,
+ capture_time_ms,
+ retransmission && (rtx & kRtxRetransmitted) > 0,
retransmission);
}
@@ -790,7 +814,7 @@ bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
bool is_retransmit) {
uint8_t *buffer_to_send_ptr = buffer;
- ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length);
+ RtpUtility::RtpHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
TRACE_EVENT_INSTANT2("webrtc_rtp", "PrepareAndSendPacket",
@@ -821,12 +845,11 @@ void RTPSender::UpdateRtpStats(const uint8_t* buffer,
bool is_retransmit) {
StreamDataCounters* counters;
// Get ssrc before taking statistics_crit_ to avoid possible deadlock.
- uint32_t ssrc = SSRC();
+ uint32_t ssrc = is_rtx ? RtxSsrc() : SSRC();
CriticalSectionScoped lock(statistics_crit_.get());
if (is_rtx) {
counters = &rtx_rtp_stats_;
- ssrc = ssrc_rtx_;
} else {
counters = &rtp_stats_;
}
@@ -868,6 +891,7 @@ int RTPSender::TimeToSendPadding(int bytes) {
int payload_type;
int64_t capture_time_ms;
uint32_t timestamp;
+ int rtx;
{
CriticalSectionScoped cs(send_critsect_);
if (!sending_media_) {
@@ -883,8 +907,11 @@ int RTPSender::TimeToSendPadding(int bytes) {
capture_time_ms +=
(clock_->TimeInMilliseconds() - last_timestamp_time_ms_);
}
+ rtx = rtx_;
}
- int bytes_sent = SendRedundantPayloads(payload_type, bytes);
+ int bytes_sent = 0;
+ if ((rtx & kRtxRedundantPayloads) != 0)
+ bytes_sent = SendRedundantPayloads(payload_type, bytes);
bytes -= bytes_sent;
if (bytes > 0) {
int padding_sent = SendPadData(payload_type,
@@ -893,19 +920,19 @@ int RTPSender::TimeToSendPadding(int bytes) {
bytes,
kDontStore,
true,
- rtx_ == kRtxOff);
+ rtx == kRtxOff);
bytes_sent += padding_sent;
}
return bytes_sent;
}
-// TODO(pwestin): send in the RTPHeaderParser to avoid parsing it again.
+// TODO(pwestin): send in the RtpHeaderParser to avoid parsing it again.
int32_t RTPSender::SendToNetwork(
uint8_t *buffer, int payload_length, int rtp_header_length,
int64_t capture_time_ms, StorageType storage,
PacedSender::Priority priority) {
- ModuleRTPUtility::RTPHeaderParser rtp_parser(
- buffer, payload_length + rtp_header_length);
+ RtpUtility::RtpHeaderParser rtp_parser(buffer,
+ payload_length + rtp_header_length);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
@@ -930,8 +957,11 @@ int32_t RTPSender::SendToNetwork(
}
if (paced_sender_ && storage != kDontStore) {
+ int64_t clock_delta_ms = clock_->TimeInMilliseconds() -
+ TickTime::MillisecondTimestamp();
if (!paced_sender_->SendPacket(priority, rtp_header.ssrc,
- rtp_header.sequenceNumber, capture_time_ms,
+ rtp_header.sequenceNumber,
+ capture_time_ms + clock_delta_ms,
payload_length, false)) {
// We can't send the packet right now.
// We will be called when it is time.
@@ -966,6 +996,7 @@ void RTPSender::ProcessBitrate() {
}
uint16_t RTPSender::RTPHeaderLength() const {
+ CriticalSectionScoped lock(send_critsect_);
uint16_t rtp_header_length = 12;
if (include_csrcs_) {
rtp_header_length += sizeof(uint32_t) * num_csrcs_;
@@ -980,12 +1011,19 @@ uint16_t RTPSender::IncrementSequenceNumber() {
}
void RTPSender::ResetDataCounters() {
+ uint32_t ssrc;
+ uint32_t ssrc_rtx;
+ {
+ CriticalSectionScoped ssrc_lock(send_critsect_);
+ ssrc = ssrc_;
+ ssrc_rtx = ssrc_rtx_;
+ }
CriticalSectionScoped lock(statistics_crit_.get());
rtp_stats_ = StreamDataCounters();
rtx_rtp_stats_ = StreamDataCounters();
if (rtp_stats_callback_) {
- rtp_stats_callback_->DataCountersUpdated(rtp_stats_, ssrc_);
- rtp_stats_callback_->DataCountersUpdated(rtx_rtp_stats_, ssrc_rtx_);
+ rtp_stats_callback_->DataCountersUpdated(rtp_stats_, ssrc);
+ rtp_stats_callback_->DataCountersUpdated(rtx_rtp_stats_, ssrc_rtx);
}
}
@@ -1009,9 +1047,9 @@ int RTPSender::CreateRTPHeader(
if (marker_bit) {
header[1] |= kRtpMarkerBitMask; // Marker bit is set.
}
- ModuleRTPUtility::AssignUWord16ToBuffer(header + 2, sequence_number);
- ModuleRTPUtility::AssignUWord32ToBuffer(header + 4, timestamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(header + 8, ssrc);
+ RtpUtility::AssignUWord16ToBuffer(header + 2, sequence_number);
+ RtpUtility::AssignUWord32ToBuffer(header + 4, timestamp);
+ RtpUtility::AssignUWord32ToBuffer(header + 8, ssrc);
int32_t rtp_header_length = 12;
// Add the CSRCs if any.
@@ -1023,7 +1061,7 @@ int RTPSender::CreateRTPHeader(
}
uint8_t *ptr = &header[rtp_header_length];
for (int i = 0; i < num_csrcs; ++i) {
- ModuleRTPUtility::AssignUWord32ToBuffer(ptr, csrcs[i]);
+ RtpUtility::AssignUWord32ToBuffer(ptr, csrcs[i]);
ptr += 4;
}
header[0] = (header[0] & 0xf0) | num_csrcs;
@@ -1040,16 +1078,18 @@ int RTPSender::CreateRTPHeader(
return rtp_header_length;
}
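
CreateRTPHeader() emits the 12-byte fixed RTP header (RFC 3550) followed by any CSRCs, with the CSRC count folded into the low nibble of the first byte as shown above. An equivalent self-contained sketch with local big-endian writers in place of RtpUtility::AssignUWordXXToBuffer:

    #include <cstdint>

    static void PutBe16(uint8_t* p, uint16_t v) { p[0] = v >> 8; p[1] = v; }
    static void PutBe32(uint8_t* p, uint32_t v) {
      p[0] = v >> 24; p[1] = v >> 16; p[2] = v >> 8; p[3] = v;
    }

    // Returns the header length in bytes (12 plus 4 per CSRC).
    int BuildFixedRtpHeader(uint8_t* header, uint8_t payload_type, bool marker_bit,
                            uint16_t sequence_number, uint32_t timestamp,
                            uint32_t ssrc, const uint32_t* csrcs, int num_csrcs) {
      header[0] = 0x80 | (num_csrcs & 0x0f);  // Version 2, CC = num_csrcs.
      header[1] = payload_type & 0x7f;
      if (marker_bit)
        header[1] |= 0x80;                    // Marker bit.
      PutBe16(header + 2, sequence_number);
      PutBe32(header + 4, timestamp);
      PutBe32(header + 8, ssrc);
      int length = 12;
      for (int i = 0; i < num_csrcs; ++i) {   // Optional contributing sources.
        PutBe32(header + length, csrcs[i]);
        length += 4;
      }
      return length;
    }
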
-int32_t RTPSender::BuildRTPheader(
- uint8_t *data_buffer, const int8_t payload_type,
- const bool marker_bit, const uint32_t capture_timestamp,
- int64_t capture_time_ms, const bool time_stamp_provided,
- const bool inc_sequence_number) {
+int32_t RTPSender::BuildRTPheader(uint8_t* data_buffer,
+ const int8_t payload_type,
+ const bool marker_bit,
+ const uint32_t capture_timestamp,
+ int64_t capture_time_ms,
+ const bool timestamp_provided,
+ const bool inc_sequence_number) {
assert(payload_type >= 0);
CriticalSectionScoped cs(send_critsect_);
- if (time_stamp_provided) {
- timestamp_ = start_time_stamp_ + capture_timestamp;
+ if (timestamp_provided) {
+ timestamp_ = start_timestamp_ + capture_timestamp;
} else {
// Make a unique time stamp.
// We can't inc by the actual time, since then we increase the risk of back
@@ -1084,8 +1124,7 @@ uint16_t RTPSender::BuildRTPHeaderExtension(uint8_t* data_buffer) const {
const uint32_t kHeaderLength = kRtpOneByteHeaderLength;
// Add extension ID (0xBEDE).
- ModuleRTPUtility::AssignUWord16ToBuffer(data_buffer,
- kRtpOneByteHeaderExtensionId);
+ RtpUtility::AssignUWord16ToBuffer(data_buffer, kRtpOneByteHeaderExtensionId);
// Add extensions.
uint16_t total_block_length = 0;
@@ -1118,8 +1157,8 @@ uint16_t RTPSender::BuildRTPHeaderExtension(uint8_t* data_buffer) const {
}
// Set header length (in number of Word32, header excluded).
assert(total_block_length % 4 == 0);
- ModuleRTPUtility::AssignUWord16ToBuffer(data_buffer + kPosLength,
- total_block_length / 4);
+ RtpUtility::AssignUWord16ToBuffer(data_buffer + kPosLength,
+ total_block_length / 4);
// Total added length.
return kHeaderLength + total_block_length;
}
@@ -1153,8 +1192,8 @@ uint8_t RTPSender::BuildTransmissionTimeOffsetExtension(
size_t pos = 0;
const uint8_t len = 2;
data_buffer[pos++] = (id << 4) + len;
- ModuleRTPUtility::AssignUWord24ToBuffer(data_buffer + pos,
- transmission_time_offset_);
+ RtpUtility::AssignUWord24ToBuffer(data_buffer + pos,
+ transmission_time_offset_);
pos += 3;
assert(pos == kTransmissionTimeOffsetLength);
return kTransmissionTimeOffsetLength;
@@ -1222,8 +1261,7 @@ uint8_t RTPSender::BuildAbsoluteSendTimeExtension(uint8_t* data_buffer) const {
size_t pos = 0;
const uint8_t len = 2;
data_buffer[pos++] = (id << 4) + len;
- ModuleRTPUtility::AssignUWord24ToBuffer(data_buffer + pos,
- absolute_send_time_);
+ RtpUtility::AssignUWord24ToBuffer(data_buffer + pos, absolute_send_time_);
pos += 3;
assert(pos == kAbsoluteSendTimeLength);
return kAbsoluteSendTimeLength;
@@ -1271,8 +1309,8 @@ void RTPSender::UpdateTransmissionTimeOffset(
return;
}
// Update transmission offset field (converting to a 90 kHz timestamp).
- ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
- time_diff_ms * 90); // RTP timestamp.
+ RtpUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
+ time_diff_ms * 90); // RTP timestamp.
}
bool RTPSender::UpdateAudioLevel(uint8_t *rtp_packet,
@@ -1359,18 +1397,19 @@ void RTPSender::UpdateAbsoluteSendTime(
}
// Update absolute send time field (convert ms to 24-bit unsigned with 18 bit
// fractional part).
- ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
- ((now_ms << 18) / 1000) & 0x00ffffff);
+ RtpUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
+ ((now_ms << 18) / 1000) & 0x00ffffff);
}
void RTPSender::SetSendingStatus(bool enabled) {
if (enabled) {
uint32_t frequency_hz = SendPayloadFrequency();
- uint32_t RTPtime = ModuleRTPUtility::GetCurrentRTP(clock_, frequency_hz);
+ uint32_t RTPtime = RtpUtility::GetCurrentRTP(clock_, frequency_hz);
// Will be ignored if it's already configured via API.
SetStartTimestamp(RTPtime, false);
} else {
+ CriticalSectionScoped lock(send_critsect_);
if (!ssrc_forced_) {
// Generate a new SSRC.
ssrc_db_.ReturnSSRC(ssrc_);
@@ -1403,18 +1442,18 @@ uint32_t RTPSender::Timestamp() const {
void RTPSender::SetStartTimestamp(uint32_t timestamp, bool force) {
CriticalSectionScoped cs(send_critsect_);
if (force) {
- start_time_stamp_forced_ = force;
- start_time_stamp_ = timestamp;
+ start_timestamp_forced_ = true;
+ start_timestamp_ = timestamp;
} else {
- if (!start_time_stamp_forced_) {
- start_time_stamp_ = timestamp;
+ if (!start_timestamp_forced_) {
+ start_timestamp_ = timestamp;
}
}
}
uint32_t RTPSender::StartTimestamp() const {
CriticalSectionScoped cs(send_critsect_);
- return start_time_stamp_;
+ return start_timestamp_;
}
uint32_t RTPSender::GenerateNewSSRC() {
@@ -1451,6 +1490,7 @@ uint32_t RTPSender::SSRC() const {
}
void RTPSender::SetCSRCStatus(const bool include) {
+ CriticalSectionScoped lock(send_critsect_);
include_csrcs_ = include;
}
@@ -1589,8 +1629,8 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, uint16_t* length,
CriticalSectionScoped cs(send_critsect_);
uint8_t* data_buffer_rtx = buffer_rtx;
// Add RTX header.
- ModuleRTPUtility::RTPHeaderParser rtp_parser(
- reinterpret_cast<const uint8_t *>(buffer), *length);
+ RtpUtility::RtpHeaderParser rtp_parser(
+ reinterpret_cast<const uint8_t*>(buffer), *length);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
@@ -1607,15 +1647,15 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, uint16_t* length,
// Replace sequence number.
uint8_t *ptr = data_buffer_rtx + 2;
- ModuleRTPUtility::AssignUWord16ToBuffer(ptr, sequence_number_rtx_++);
+ RtpUtility::AssignUWord16ToBuffer(ptr, sequence_number_rtx_++);
// Replace SSRC.
ptr += 6;
- ModuleRTPUtility::AssignUWord32ToBuffer(ptr, ssrc_rtx_);
+ RtpUtility::AssignUWord32ToBuffer(ptr, ssrc_rtx_);
// Add OSN (original sequence number).
ptr = data_buffer_rtx + rtp_header.headerLength;
- ModuleRTPUtility::AssignUWord16ToBuffer(ptr, rtp_header.sequenceNumber);
+ RtpUtility::AssignUWord16ToBuffer(ptr, rtp_header.sequenceNumber);
ptr += 2;
// Add original payload data.
@@ -1626,8 +1666,6 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, uint16_t* length,
void RTPSender::RegisterFrameCountObserver(FrameCountObserver* observer) {
CriticalSectionScoped cs(statistics_crit_.get());
- if (observer != NULL)
- assert(frame_count_observer_ == NULL);
frame_count_observer_ = observer;
}
@@ -1639,8 +1677,6 @@ FrameCountObserver* RTPSender::GetFrameCountObserver() const {
void RTPSender::RegisterRtpStatisticsCallback(
StreamDataCountersCallback* callback) {
CriticalSectionScoped cs(statistics_crit_.get());
- if (callback != NULL)
- assert(rtp_stats_callback_ == NULL);
rtp_stats_callback_ = callback;
}
@@ -1649,24 +1685,55 @@ StreamDataCountersCallback* RTPSender::GetRtpStatisticsCallback() const {
return rtp_stats_callback_;
}
-void RTPSender::RegisterBitrateObserver(BitrateStatisticsObserver* observer) {
- CriticalSectionScoped cs(statistics_crit_.get());
- if (observer != NULL)
- assert(bitrate_callback_ == NULL);
- bitrate_callback_ = observer;
-}
-
-BitrateStatisticsObserver* RTPSender::GetBitrateObserver() const {
- CriticalSectionScoped cs(statistics_crit_.get());
- return bitrate_callback_;
-}
-
uint32_t RTPSender::BitrateSent() const { return bitrate_sent_.BitrateLast(); }
void RTPSender::BitrateUpdated(const BitrateStatistics& stats) {
- CriticalSectionScoped cs(statistics_crit_.get());
+ uint32_t ssrc;
+ {
+ CriticalSectionScoped ssrc_lock(send_critsect_);
+ ssrc = ssrc_;
+ }
if (bitrate_callback_) {
- bitrate_callback_->Notify(stats, ssrc_);
+ bitrate_callback_->Notify(stats, ssrc);
}
}
+
+void RTPSender::SetRtpState(const RtpState& rtp_state) {
+ SetStartTimestamp(rtp_state.start_timestamp, true);
+ CriticalSectionScoped lock(send_critsect_);
+ sequence_number_ = rtp_state.sequence_number;
+ sequence_number_forced_ = true;
+ timestamp_ = rtp_state.timestamp;
+ capture_time_ms_ = rtp_state.capture_time_ms;
+ last_timestamp_time_ms_ = rtp_state.last_timestamp_time_ms;
+}
+
+RtpState RTPSender::GetRtpState() const {
+ CriticalSectionScoped lock(send_critsect_);
+
+ RtpState state;
+ state.sequence_number = sequence_number_;
+ state.start_timestamp = start_timestamp_;
+ state.timestamp = timestamp_;
+ state.capture_time_ms = capture_time_ms_;
+ state.last_timestamp_time_ms = last_timestamp_time_ms_;
+
+ return state;
+}
+
+void RTPSender::SetRtxRtpState(const RtpState& rtp_state) {
+ CriticalSectionScoped lock(send_critsect_);
+ sequence_number_rtx_ = rtp_state.sequence_number;
+}
+
+RtpState RTPSender::GetRtxRtpState() const {
+ CriticalSectionScoped lock(send_critsect_);
+
+ RtpState state;
+ state.sequence_number = sequence_number_rtx_;
+ state.start_timestamp = start_timestamp_;
+
+ return state;
+}
+
} // namespace webrtc
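
Note: the RtpState accessors added above expose the sender's sequence-number and timestamp bookkeeping so a caller can carry it over when a send stream is torn down and recreated. A minimal sketch of that hand-off, assuming two valid RTPSender instances and the header path used elsewhere in this change:

    #include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"

    namespace {
    // Copy the RTP and RTX state from a sender that is being destroyed onto its
    // replacement, so the outgoing stream keeps monotonic sequence numbers and
    // timestamps instead of restarting from newly randomized values.
    void CarryOverRtpState(const webrtc::RTPSender& old_sender,
                           webrtc::RTPSender* new_sender) {
      new_sender->SetRtpState(old_sender.GetRtpState());        // seq/timestamps.
      new_sender->SetRtxRtpState(old_sender.GetRtxRtpState());  // RTX seq only.
    }
    }  // namespace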
diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h
index 291e619b..0cc35cf4 100644
--- a/modules/rtp_rtcp/source/rtp_sender.h
+++ b/modules/rtp_rtcp/source/rtp_sender.h
@@ -43,12 +43,13 @@ class RTPSenderInterface {
virtual uint32_t SSRC() const = 0;
virtual uint32_t Timestamp() const = 0;
- virtual int32_t BuildRTPheader(
- uint8_t *data_buffer, const int8_t payload_type,
- const bool marker_bit, const uint32_t capture_time_stamp,
- int64_t capture_time_ms,
- const bool time_stamp_provided = true,
- const bool inc_sequence_number = true) = 0;
+ virtual int32_t BuildRTPheader(uint8_t* data_buffer,
+ const int8_t payload_type,
+ const bool marker_bit,
+ const uint32_t capture_timestamp,
+ int64_t capture_time_ms,
+ const bool timestamp_provided = true,
+ const bool inc_sequence_number = true) = 0;
virtual uint16_t RTPHeaderLength() const = 0;
virtual uint16_t IncrementSequenceNumber() = 0;
@@ -68,7 +69,8 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
public:
RTPSender(const int32_t id, const bool audio, Clock *clock,
Transport *transport, RtpAudioFeedback *audio_feedback,
- PacedSender *paced_sender);
+ PacedSender *paced_sender,
+ BitrateStatisticsObserver* bitrate_callback);
virtual ~RTPSender();
void ProcessBitrate();
@@ -132,13 +134,15 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
int32_t SetMaxPayloadLength(const uint16_t length,
const uint16_t packet_over_head);
- int32_t SendOutgoingData(
- const FrameType frame_type, const int8_t payload_type,
- const uint32_t time_stamp, int64_t capture_time_ms,
- const uint8_t *payload_data, const uint32_t payload_size,
- const RTPFragmentationHeader *fragmentation,
- VideoCodecInformation *codec_info = NULL,
- const RTPVideoTypeHeader * rtp_type_hdr = NULL);
+ int32_t SendOutgoingData(const FrameType frame_type,
+ const int8_t payload_type,
+ const uint32_t timestamp,
+ int64_t capture_time_ms,
+ const uint8_t* payload_data,
+ const uint32_t payload_size,
+ const RTPFragmentationHeader* fragmentation,
+ VideoCodecInformation* codec_info = NULL,
+ const RTPVideoTypeHeader* rtp_type_hdr = NULL);
// RTP header extension
int32_t SetTransmissionTimeOffset(
@@ -189,16 +193,19 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
void RTXStatus(int* mode, uint32_t* ssrc, int* payload_type) const;
+ uint32_t RtxSsrc() const;
void SetRtxSsrc(uint32_t ssrc);
void SetRtxPayloadType(int payloadType);
// Functions wrapping RTPSenderInterface.
virtual int32_t BuildRTPheader(
- uint8_t *data_buffer, const int8_t payload_type,
- const bool marker_bit, const uint32_t capture_time_stamp,
+ uint8_t* data_buffer,
+ const int8_t payload_type,
+ const bool marker_bit,
+ const uint32_t capture_timestamp,
int64_t capture_time_ms,
- const bool time_stamp_provided = true,
+ const bool timestamp_provided = true,
const bool inc_sequence_number = true) OVERRIDE;
virtual uint16_t RTPHeaderLength() const OVERRIDE;
@@ -269,14 +276,15 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
void RegisterRtpStatisticsCallback(StreamDataCountersCallback* callback);
StreamDataCountersCallback* GetRtpStatisticsCallback() const;
- // Called on new send bitrate estimate.
- void RegisterBitrateObserver(BitrateStatisticsObserver* observer);
- BitrateStatisticsObserver* GetBitrateObserver() const;
-
uint32_t BitrateSent() const;
virtual void BitrateUpdated(const BitrateStatistics& stats) OVERRIDE;
+ void SetRtpState(const RtpState& rtp_state);
+ RtpState GetRtpState() const;
+ void SetRtxRtpState(const RtpState& rtp_state);
+ RtpState GetRtxRtpState() const;
+
protected:
int32_t CheckPayloadType(const int8_t payload_type,
RtpVideoCodecTypes *video_type);
@@ -348,7 +356,7 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
uint16_t packet_over_head_;
int8_t payload_type_ GUARDED_BY(send_critsect_);
- std::map<int8_t, ModuleRTPUtility::Payload *> payload_type_map_;
+ std::map<int8_t, RtpUtility::Payload*> payload_type_map_;
RtpHeaderExtensionMap rtp_header_extension_map_;
int32_t transmission_time_offset_;
@@ -363,34 +371,34 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
// Statistics
scoped_ptr<CriticalSectionWrapper> statistics_crit_;
- SendDelayMap send_delays_;
- std::map<FrameType, uint32_t> frame_counts_;
- FrameCountObserver* frame_count_observer_;
- StreamDataCounters rtp_stats_;
- StreamDataCounters rtx_rtp_stats_;
- StreamDataCountersCallback* rtp_stats_callback_;
- BitrateStatisticsObserver* bitrate_callback_;
+ SendDelayMap send_delays_ GUARDED_BY(statistics_crit_);
+ std::map<FrameType, uint32_t> frame_counts_ GUARDED_BY(statistics_crit_);
+ FrameCountObserver* frame_count_observer_ GUARDED_BY(statistics_crit_);
+ StreamDataCounters rtp_stats_ GUARDED_BY(statistics_crit_);
+ StreamDataCounters rtx_rtp_stats_ GUARDED_BY(statistics_crit_);
+ StreamDataCountersCallback* rtp_stats_callback_ GUARDED_BY(statistics_crit_);
+ BitrateStatisticsObserver* const bitrate_callback_;
// RTP variables
- bool start_time_stamp_forced_;
- uint32_t start_time_stamp_;
- SSRCDatabase &ssrc_db_;
- uint32_t remote_ssrc_;
- bool sequence_number_forced_;
- uint16_t sequence_number_;
- uint16_t sequence_number_rtx_;
- bool ssrc_forced_;
- uint32_t ssrc_;
- uint32_t timestamp_;
- int64_t capture_time_ms_;
- int64_t last_timestamp_time_ms_;
- bool last_packet_marker_bit_;
- uint8_t num_csrcs_;
- uint32_t csrcs_[kRtpCsrcSize];
- bool include_csrcs_;
- int rtx_;
- uint32_t ssrc_rtx_;
- int payload_type_rtx_;
+ bool start_timestamp_forced_ GUARDED_BY(send_critsect_);
+ uint32_t start_timestamp_ GUARDED_BY(send_critsect_);
+ SSRCDatabase& ssrc_db_ GUARDED_BY(send_critsect_);
+ uint32_t remote_ssrc_ GUARDED_BY(send_critsect_);
+ bool sequence_number_forced_ GUARDED_BY(send_critsect_);
+ uint16_t sequence_number_ GUARDED_BY(send_critsect_);
+ uint16_t sequence_number_rtx_ GUARDED_BY(send_critsect_);
+ bool ssrc_forced_ GUARDED_BY(send_critsect_);
+ uint32_t ssrc_ GUARDED_BY(send_critsect_);
+ uint32_t timestamp_ GUARDED_BY(send_critsect_);
+ int64_t capture_time_ms_ GUARDED_BY(send_critsect_);
+ int64_t last_timestamp_time_ms_ GUARDED_BY(send_critsect_);
+ bool last_packet_marker_bit_ GUARDED_BY(send_critsect_);
+ uint8_t num_csrcs_ GUARDED_BY(send_critsect_);
+ uint32_t csrcs_[kRtpCsrcSize] GUARDED_BY(send_critsect_);
+ bool include_csrcs_ GUARDED_BY(send_critsect_);
+ int rtx_ GUARDED_BY(send_critsect_);
+ uint32_t ssrc_rtx_ GUARDED_BY(send_critsect_);
+ int payload_type_rtx_ GUARDED_BY(send_critsect_);
// Note: Don't access this variable directly, always go through
// SetTargetBitrateKbps or GetTargetBitrateKbps. Also remember
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.cc b/modules/rtp_rtcp/source/rtp_sender_audio.cc
index 6b3e2276..99c00851 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -89,10 +89,10 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
const uint32_t frequency,
const uint8_t channels,
const uint32_t rate,
- ModuleRTPUtility::Payload*& payload) {
+ RtpUtility::Payload*& payload) {
CriticalSectionScoped cs(_sendAudioCritsect);
- if (ModuleRTPUtility::StringCompare(payloadName, "cn", 2)) {
+ if (RtpUtility::StringCompare(payloadName, "cn", 2)) {
// we can have multiple CNG payload types
if (frequency == 8000) {
_cngNBPayloadType = payloadType;
@@ -110,14 +110,14 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
return -1;
}
}
- if (ModuleRTPUtility::StringCompare(payloadName, "telephone-event", 15)) {
+ if (RtpUtility::StringCompare(payloadName, "telephone-event", 15)) {
// Don't add it to the list
// we dont want to allow send with a DTMF payloadtype
_dtmfPayloadType = payloadType;
return 0;
// The default timestamp rate is 8000 Hz, but other rates may be defined.
}
- payload = new ModuleRTPUtility::Payload;
+ payload = new RtpUtility::Payload;
payload->typeSpecific.Audio.frequency = frequency;
payload->typeSpecific.Audio.channels = channels;
payload->typeSpecific.Audio.rate = rate;
@@ -388,8 +388,8 @@ int32_t RTPSenderAudio::SendAudio(
return -1;
}
uint32_t REDheader = (timestampOffset << 10) + blockLength;
- ModuleRTPUtility::AssignUWord24ToBuffer(dataBuffer + rtpHeaderLength,
- REDheader);
+ RtpUtility::AssignUWord24ToBuffer(dataBuffer + rtpHeaderLength,
+ REDheader);
rtpHeaderLength += 3;
dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
@@ -436,7 +436,7 @@ int32_t RTPSenderAudio::SendAudio(
// Update audio level extension, if included.
{
uint16_t packetSize = payloadSize + rtpHeaderLength;
- ModuleRTPUtility::RTPHeaderParser rtp_parser(dataBuffer, packetSize);
+ RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
_rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header,
@@ -558,7 +558,7 @@ RTPSenderAudio::SendTelephoneEventPacket(const bool ended,
// First byte is Event number, equals key number
dtmfbuffer[12] = _dtmfKey;
dtmfbuffer[13] = E|R|volume;
- ModuleRTPUtility::AssignUWord16ToBuffer(dtmfbuffer+14, duration);
+ RtpUtility::AssignUWord16ToBuffer(dtmfbuffer + 14, duration);
_sendAudioCritsect->Leave();
TRACE_EVENT_INSTANT2("webrtc_rtp",
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.h b/modules/rtp_rtcp/source/rtp_sender_audio.h
index 732199c1..d3f67e5e 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -26,13 +26,12 @@ public:
RTPSender* rtpSender);
virtual ~RTPSenderAudio();
- int32_t RegisterAudioPayload(
- const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payloadType,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate,
- ModuleRTPUtility::Payload*& payload);
+ int32_t RegisterAudioPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const int8_t payloadType,
+ const uint32_t frequency,
+ const uint8_t channels,
+ const uint32_t rate,
+ RtpUtility::Payload*& payload);
int32_t SendAudio(const FrameType frameType,
const int8_t payloadType,
diff --git a/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index 18482890..e08aa202 100644
--- a/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -94,7 +94,7 @@ class RtpSenderTest : public ::testing::Test {
virtual void SetUp() {
rtp_sender_.reset(new RTPSender(0, false, &fake_clock_, &transport_, NULL,
- &mock_paced_sender_));
+ &mock_paced_sender_, NULL));
rtp_sender_->SetSequenceNumber(kSeqNum);
}
@@ -204,7 +204,7 @@ TEST_F(RtpSenderTest, BuildRTPPacket) {
EXPECT_EQ(kRtpHeaderSize, length);
// Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
const bool valid_rtp_header = rtp_parser.Parse(rtp_header, NULL);
@@ -235,7 +235,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
length);
// Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
RtpHeaderExtensionMap map;
@@ -276,7 +276,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithNegativeTransmissionOffsetExtension) {
length);
// Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
RtpHeaderExtensionMap map;
@@ -306,7 +306,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
length);
// Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
RtpHeaderExtensionMap map;
@@ -344,7 +344,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAudioLevelExtension) {
length);
// Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
// Updating audio level is done in RTPSenderAudio, so simulate it here.
@@ -394,7 +394,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
length);
// Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
// Updating audio level is done in RTPSenderAudio, so simulate it here.
@@ -471,8 +471,8 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
EXPECT_EQ(1, transport_.packets_sent_);
EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
// Parse sent packet.
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(
- transport_.last_sent_packet_, rtp_length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
+ rtp_length);
webrtc::RTPHeader rtp_header;
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionTransmissionTimeOffset,
@@ -533,8 +533,8 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) {
EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
// Parse sent packet.
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(
- transport_.last_sent_packet_, rtp_length);
+ webrtc::RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
+ rtp_length);
webrtc::RTPHeader rtp_header;
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionTransmissionTimeOffset,
@@ -672,7 +672,7 @@ TEST_F(RtpSenderTest, SendPadding) {
TEST_F(RtpSenderTest, SendRedundantPayloads) {
MockTransport transport;
rtp_sender_.reset(new RTPSender(0, false, &fake_clock_, &transport, NULL,
- &mock_paced_sender_));
+ &mock_paced_sender_, NULL));
rtp_sender_->SetSequenceNumber(kSeqNum);
// Make all packets go through the pacer.
EXPECT_CALL(mock_paced_sender_,
@@ -744,8 +744,8 @@ TEST_F(RtpSenderTest, SendGenericVideo) {
4321, payload, sizeof(payload),
NULL));
- ModuleRTPUtility::RTPHeaderParser rtp_parser(transport_.last_sent_packet_,
- transport_.last_sent_packet_len_);
+ RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
+ transport_.last_sent_packet_len_);
webrtc::RTPHeader rtp_header;
ASSERT_TRUE(rtp_parser.Parse(rtp_header));
@@ -770,8 +770,8 @@ TEST_F(RtpSenderTest, SendGenericVideo) {
1234, 4321, payload,
sizeof(payload), NULL));
- ModuleRTPUtility::RTPHeaderParser rtp_parser2(transport_.last_sent_packet_,
- transport_.last_sent_packet_len_);
+ RtpUtility::RtpHeaderParser rtp_parser2(transport_.last_sent_packet_,
+ transport_.last_sent_packet_len_);
ASSERT_TRUE(rtp_parser.Parse(rtp_header));
payload_data = GetPayloadData(rtp_header, transport_.last_sent_packet_);
@@ -865,6 +865,8 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
uint32_t ssrc_;
BitrateStatistics bitrate_;
} callback;
+ rtp_sender_.reset(new RTPSender(0, false, &fake_clock_, &transport_, NULL,
+ &mock_paced_sender_, &callback));
// Simulate kNumPackets sent with kPacketInterval ms intervals.
const uint32_t kNumPackets = 15;
@@ -881,8 +883,6 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
rtp_sender_->SetStorePacketsStatus(true, 1);
uint32_t ssrc = rtp_sender_->SSRC();
- rtp_sender_->RegisterBitrateObserver(&callback);
-
// Initial process call so we get a new time window.
rtp_sender_->ProcessBitrate();
uint64_t start_time = fake_clock_.CurrentNtpInMilliseconds();
@@ -912,7 +912,7 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
EXPECT_EQ((kPacketOverhead + sizeof(payload)) * 8 * expected_packet_rate,
callback.bitrate_.bitrate_bps);
- rtp_sender_->RegisterBitrateObserver(NULL);
+ rtp_sender_.reset();
}
class RtpSenderAudioTest : public RtpSenderTest {
@@ -922,7 +922,7 @@ class RtpSenderAudioTest : public RtpSenderTest {
virtual void SetUp() {
payload_ = kAudioPayload;
rtp_sender_.reset(new RTPSender(0, true, &fake_clock_, &transport_, NULL,
- &mock_paced_sender_));
+ &mock_paced_sender_, NULL));
rtp_sender_->SetSequenceNumber(kSeqNum);
}
};
@@ -1018,8 +1018,8 @@ TEST_F(RtpSenderAudioTest, SendAudio) {
4321, payload, sizeof(payload),
NULL));
- ModuleRTPUtility::RTPHeaderParser rtp_parser(transport_.last_sent_packet_,
- transport_.last_sent_packet_len_);
+ RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
+ transport_.last_sent_packet_len_);
webrtc::RTPHeader rtp_header;
ASSERT_TRUE(rtp_parser.Parse(rtp_header));
@@ -1047,8 +1047,8 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
4321, payload, sizeof(payload),
NULL));
- ModuleRTPUtility::RTPHeaderParser rtp_parser(transport_.last_sent_packet_,
- transport_.last_sent_packet_len_);
+ RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
+ transport_.last_sent_packet_len_);
webrtc::RTPHeader rtp_header;
ASSERT_TRUE(rtp_parser.Parse(rtp_header));
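
Note: the unit-test changes above follow the new constructor: the BitrateStatisticsObserver is injected when the RTPSender is created (and held in the const bitrate_callback_ member) instead of being registered later through the removed RegisterBitrateObserver(). A sketch of the construction, with NULL allowed where no observer is wanted, as in the updated tests; the helper name is illustrative only:

    #include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"

    // Sketch: wire a video RTPSender to an optional bitrate observer at
    // construction time. Passing NULL simply disables the notifications.
    webrtc::RTPSender* CreateVideoSender(
        webrtc::Clock* clock,
        webrtc::Transport* transport,
        webrtc::PacedSender* pacer,
        webrtc::BitrateStatisticsObserver* bitrate_observer) {
      const int32_t kId = 0;
      const bool kAudio = false;
      return new webrtc::RTPSender(kId, kAudio, clock, transport,
                                   NULL /* audio_feedback */, pacer,
                                   bitrate_observer);
    }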
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc
index 5d8ae166..ea5f7a7e 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -84,18 +84,20 @@ int32_t RTPSenderVideo::RegisterVideoPayload(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t maxBitRate,
- ModuleRTPUtility::Payload*& payload) {
+ RtpUtility::Payload*& payload) {
CriticalSectionScoped cs(_sendVideoCritsect);
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
- if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3)) {
+ if (RtpUtility::StringCompare(payloadName, "VP8", 3)) {
videoType = kRtpVideoVp8;
- } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
+ } else if (RtpUtility::StringCompare(payloadName, "H264", 4)) {
+ videoType = kRtpVideoH264;
+ } else if (RtpUtility::StringCompare(payloadName, "I420", 4)) {
videoType = kRtpVideoGeneric;
} else {
videoType = kRtpVideoGeneric;
}
- payload = new ModuleRTPUtility::Payload;
+ payload = new RtpUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
payload->typeSpecific.Video.videoCodecType = videoType;
@@ -211,7 +213,7 @@ RTPSenderVideo::SendRTPIntraRequest()
data[2] = 0;
data[3] = 1; // length
- ModuleRTPUtility::AssignUWord32ToBuffer(data+4, _rtpSender.SSRC());
+ RtpUtility::AssignUWord32ToBuffer(data + 4, _rtpSender.SSRC());
TRACE_EVENT_INSTANT1("webrtc_rtp",
"Video::IntraRequest",
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h
index daa730e8..82bd1de8 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -39,11 +39,10 @@ public:
uint16_t FECPacketOverhead() const;
- int32_t RegisterVideoPayload(
- const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payloadType,
- const uint32_t maxBitRate,
- ModuleRTPUtility::Payload*& payload);
+ int32_t RegisterVideoPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const int8_t payloadType,
+ const uint32_t maxBitRate,
+ RtpUtility::Payload*& payload);
int32_t SendVideo(const RtpVideoCodecTypes videoType,
const FrameType frameType,
diff --git a/modules/rtp_rtcp/source/rtp_utility.cc b/modules/rtp_rtcp/source/rtp_utility.cc
index c1f3c642..95389b46 100644
--- a/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/modules/rtp_rtcp/source/rtp_utility.cc
@@ -66,7 +66,7 @@ ReceiveStatistics* NullObjectReceiveStatistics() {
return &null_receive_statistics;
}
-namespace ModuleRTPUtility {
+namespace RtpUtility {
enum {
kRtcpExpectedVersion = 2,
@@ -215,16 +215,16 @@ void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
}
}
-RTPHeaderParser::RTPHeaderParser(const uint8_t* rtpData,
- const uint32_t rtpDataLength)
- : _ptrRTPDataBegin(rtpData),
- _ptrRTPDataEnd(rtpData ? (rtpData + rtpDataLength) : NULL) {
+RtpHeaderParser::RtpHeaderParser(const uint8_t* rtpData,
+ const size_t rtpDataLength)
+ : _ptrRTPDataBegin(rtpData),
+ _ptrRTPDataEnd(rtpData ? (rtpData + rtpDataLength) : NULL) {
}
-RTPHeaderParser::~RTPHeaderParser() {
+RtpHeaderParser::~RtpHeaderParser() {
}
-bool RTPHeaderParser::RTCP() const {
+bool RtpHeaderParser::RTCP() const {
// 72 to 76 is reserved for RTP
// 77 to 79 is not reserver but they are not assigned we will block them
// for RTCP 200 SR == marker bit + 72
@@ -299,7 +299,7 @@ bool RTPHeaderParser::RTCP() const {
return RTCP;
}
-bool RTPHeaderParser::ParseRtcp(RTPHeader* header) const {
+bool RtpHeaderParser::ParseRtcp(RTPHeader* header) const {
assert(header != NULL);
const ptrdiff_t length = _ptrRTPDataEnd - _ptrRTPDataBegin;
@@ -328,7 +328,7 @@ bool RTPHeaderParser::ParseRtcp(RTPHeader* header) const {
return true;
}
-bool RTPHeaderParser::Parse(RTPHeader& header,
+bool RtpHeaderParser::Parse(RTPHeader& header,
RtpHeaderExtensionMap* ptrExtensionMap) const {
const ptrdiff_t length = _ptrRTPDataEnd - _ptrRTPDataBegin;
if (length < kRtpMinParseLength) {
@@ -441,7 +441,7 @@ bool RTPHeaderParser::Parse(RTPHeader& header,
return true;
}
-void RTPHeaderParser::ParseOneByteExtensionHeader(
+void RtpHeaderParser::ParseOneByteExtensionHeader(
RTPHeader& header,
const RtpHeaderExtensionMap* ptrExtensionMap,
const uint8_t* ptrRTPDataExtensionEnd,
@@ -552,10 +552,9 @@ void RTPHeaderParser::ParseOneByteExtensionHeader(
}
}
-uint8_t RTPHeaderParser::ParsePaddingBytes(
- const uint8_t* ptrRTPDataExtensionEnd,
- const uint8_t* ptr) const {
-
+uint8_t RtpHeaderParser::ParsePaddingBytes(
+ const uint8_t* ptrRTPDataExtensionEnd,
+ const uint8_t* ptr) const {
uint8_t num_zero_bytes = 0;
while (ptrRTPDataExtensionEnd - ptr > 0) {
if (*ptr != 0) {
@@ -768,6 +767,6 @@ int RTPPayloadParser::ParseVP8TIDAndKeyIdx(RTPPayloadVP8* vp8,
return 0;
}
-} // namespace ModuleRTPUtility
+} // namespace RtpUtility
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_utility.h b/modules/rtp_rtcp/source/rtp_utility.h
index 732301f6..ef50570d 100644
--- a/modules/rtp_rtcp/source/rtp_utility.h
+++ b/modules/rtp_rtcp/source/rtp_utility.h
@@ -28,8 +28,7 @@ RtpFeedback* NullObjectRtpFeedback();
RtpAudioFeedback* NullObjectRtpAudioFeedback();
ReceiveStatistics* NullObjectReceiveStatistics();
-namespace ModuleRTPUtility
-{
+namespace RtpUtility {
// January 1970, in NTP seconds.
const uint32_t NTP_JAN_1970 = 2208988800UL;
@@ -92,12 +91,10 @@ namespace ModuleRTPUtility
*/
uint32_t BufferToUWord32(const uint8_t* dataBuffer);
- class RTPHeaderParser
- {
+ class RtpHeaderParser {
public:
- RTPHeaderParser(const uint8_t* rtpData,
- const uint32_t rtpDataLength);
- ~RTPHeaderParser();
+ RtpHeaderParser(const uint8_t* rtpData, size_t rtpDataLength);
+ ~RtpHeaderParser();
bool RTCP() const;
bool ParseRtcp(RTPHeader* header) const;
@@ -207,7 +204,7 @@ namespace ModuleRTPUtility
const RtpVideoCodecTypes _videoType;
};
-} // namespace ModuleRTPUtility
+ } // namespace RtpUtility
} // namespace webrtc
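
Note: with the namespace and class renamed, call sites construct RtpUtility::RtpHeaderParser from a buffer and a size_t length and then call Parse() into an RTPHeader, exactly as in the files updated above. A minimal sketch of that pattern, assuming packet points at length bytes of RTP data:

    #include <stddef.h>
    #include <stdint.h>

    #include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"

    // Sketch: pull the sequence number out of a raw RTP packet with the renamed
    // parser. Returns false if the buffer does not hold a valid RTP header.
    bool ParseSequenceNumber(const uint8_t* packet,
                             size_t length,
                             uint16_t* sequence_number) {
      webrtc::RtpUtility::RtpHeaderParser parser(packet, length);
      webrtc::RTPHeader header;
      if (!parser.Parse(header))
        return false;
      *sequence_number = header.sequenceNumber;
      return true;
    }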
diff --git a/modules/rtp_rtcp/source/rtp_utility_unittest.cc b/modules/rtp_rtcp/source/rtp_utility_unittest.cc
index d33eaf4c..27170d55 100644
--- a/modules/rtp_rtcp/source/rtp_utility_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_utility_unittest.cc
@@ -8,9 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
/*
- * This file conatins unit tests for the ModuleRTPUtility.
+ * This file contains unit tests for the RtpUtility.
*/
#include "testing/gtest/include/gtest/gtest.h"
@@ -20,9 +19,9 @@
namespace webrtc {
-using ModuleRTPUtility::RTPPayloadParser;
-using ModuleRTPUtility::RTPPayload;
-using ModuleRTPUtility::RTPPayloadVP8;
+using RtpUtility::RTPPayloadParser;
+using RtpUtility::RTPPayload;
+using RtpUtility::RTPPayloadVP8;
// Payload descriptor
// 0 1 2 3 4 5 6 7
@@ -81,7 +80,7 @@ TEST(ParseVP8Test, BasicHeader) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 4 /*PartID*/);
@@ -102,7 +101,7 @@ TEST(ParseVP8Test, PictureID) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
@@ -141,7 +140,7 @@ TEST(ParseVP8Test, Tl0PicIdx) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kIFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 0 /*PartID*/);
@@ -164,7 +163,7 @@ TEST(ParseVP8Test, TIDAndLayerSync) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
@@ -188,7 +187,7 @@ TEST(ParseVP8Test, KeyIdx) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
@@ -214,7 +213,7 @@ TEST(ParseVP8Test, MultipleExtensions) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kPFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
@@ -263,7 +262,7 @@ TEST(ParseVP8Test, TestWithPacketizer) {
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
- EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
+ EXPECT_EQ(RtpUtility::kIFrame, parsedPacket.frameType);
EXPECT_EQ(kRtpVideoVp8, parsedPacket.type);
VerifyBasicHeader(parsedPacket.info.VP8,
diff --git a/modules/rtp_rtcp/test/testAPI/test_api.h b/modules/rtp_rtcp/test/testAPI/test_api.h
index 8061ce01..1c6b8838 100644
--- a/modules/rtp_rtcp/test/testAPI/test_api.h
+++ b/modules/rtp_rtcp/test/testAPI/test_api.h
@@ -52,7 +52,9 @@ class LoopBackTransport : public webrtc::Transport {
}
RTPHeader header;
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
- if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
+ if (!parser->Parse(static_cast<const uint8_t*>(data),
+ static_cast<size_t>(len),
+ &header)) {
return -1;
}
PayloadUnion payload_specific;
diff --git a/modules/rtp_rtcp/test/testAPI/test_api_video.cc b/modules/rtp_rtcp/test/testAPI/test_api_video.cc
index 94d1e52e..4c4944d9 100644
--- a/modules/rtp_rtcp/test/testAPI/test_api_video.cc
+++ b/modules/rtp_rtcp/test/testAPI/test_api_video.cc
@@ -83,11 +83,9 @@ class RtpRtcpVideoTest : public ::testing::Test {
uint32_t sequence_number) {
dataBuffer[0] = static_cast<uint8_t>(0x80); // version 2
dataBuffer[1] = static_cast<uint8_t>(kPayloadType);
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer + 2,
- sequence_number);
- ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 4, timestamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 8,
- 0x1234); // SSRC.
+ RtpUtility::AssignUWord16ToBuffer(dataBuffer + 2, sequence_number);
+ RtpUtility::AssignUWord32ToBuffer(dataBuffer + 4, timestamp);
+ RtpUtility::AssignUWord32ToBuffer(dataBuffer + 8, 0x1234); // SSRC.
int32_t rtpHeaderLength = 12;
return rtpHeaderLength;
}
diff --git a/modules/rtp_rtcp/test/testFec/test_fec.cc b/modules/rtp_rtcp/test/testFec/test_fec.cc
index fc11fbee..bdb53afe 100644
--- a/modules/rtp_rtcp/test/testFec/test_fec.cc
+++ b/modules/rtp_rtcp/test/testFec/test_fec.cc
@@ -134,7 +134,7 @@ TEST(FecTest, FecTest) {
fclose(randomSeedFile);
randomSeedFile = NULL;
- uint16_t seqNum = static_cast<uint16_t>(rand());
+ uint16_t seqNum = 0;
uint32_t timeStamp = static_cast<uint32_t>(rand());
const uint32_t ssrc = static_cast<uint32_t>(rand());
@@ -224,6 +224,11 @@ TEST(FecTest, FecTest) {
}
// Construct media packets.
+ // Reset the sequence number here for each FEC code/mask tested
+ // below, to avoid sequence number wrap-around. In actual decoding,
+ // old FEC packets in list are dropped if sequence number wrap
+ // around is detected. This case is currently not handled below.
+ seqNum = 0;
for (uint32_t i = 0; i < numMediaPackets; ++i) {
mediaPacket = new ForwardErrorCorrection::Packet;
mediaPacketList.push_back(mediaPacket);
@@ -254,12 +259,10 @@ TEST(FecTest, FecTest) {
// Only push one (fake) frame to the FEC.
mediaPacket->data[1] &= 0x7f;
- ModuleRTPUtility::AssignUWord16ToBuffer(&mediaPacket->data[2],
- seqNum);
- ModuleRTPUtility::AssignUWord32ToBuffer(&mediaPacket->data[4],
- timeStamp);
- ModuleRTPUtility::AssignUWord32ToBuffer(&mediaPacket->data[8],
- ssrc);
+ RtpUtility::AssignUWord16ToBuffer(&mediaPacket->data[2], seqNum);
+ RtpUtility::AssignUWord32ToBuffer(&mediaPacket->data[4],
+ timeStamp);
+ RtpUtility::AssignUWord32ToBuffer(&mediaPacket->data[8], ssrc);
// Generate random values for payload
for (int32_t j = 12; j < mediaPacket->length; ++j) {
mediaPacket->data[j] = static_cast<uint8_t>(rand() % 256);
@@ -298,7 +301,7 @@ TEST(FecTest, FecTest) {
memcpy(receivedPacket->pkt->data, mediaPacket->data,
mediaPacket->length);
receivedPacket->seq_num =
- ModuleRTPUtility::BufferToUWord16(&mediaPacket->data[2]);
+ RtpUtility::BufferToUWord16(&mediaPacket->data[2]);
receivedPacket->is_fec = false;
}
mediaPacketIdx++;
diff --git a/modules/utility/source/rtp_dump_impl.h b/modules/utility/source/rtp_dump_impl.h
index 04ae7dfc..ff3f07ce 100644
--- a/modules/utility/source/rtp_dump_impl.h
+++ b/modules/utility/source/rtp_dump_impl.h
@@ -35,7 +35,7 @@ private:
inline uint16_t RtpDumpHtons(uint16_t x) const;
// Return true if the packet starts with a valid RTCP header.
- // Note: See ModuleRTPUtility::RTPHeaderParser::RTCP() for details on how
+ // Note: See RtpUtility::RtpHeaderParser::RTCP() for details on how
// to determine if the packet is an RTCP packet.
bool RTCP(const uint8_t* packet) const;
diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn
index f5a69b18..0dc6721b 100644
--- a/modules/video_coding/BUILD.gn
+++ b/modules/video_coding/BUILD.gn
@@ -74,13 +74,49 @@ source_set("video_coding") {
}
source_set("video_coding_utility") {
- # TODO(stefan): Implement.
+ sources = [
+ "utility/exp_filter.cc",
+ "utility/include/exp_filter.h",
+ "utility/include/frame_dropper.h",
+ "utility/frame_dropper.cc",
+ ]
+
+ deps = [ "../../system_wrappers" ]
}
source_set("webrtc_i420") {
- # TODO(stefan): Implement.
+ sources = [
+ "codecs/i420/main/source/i420.cc",
+ "codecs/i420/main/interface/i420.h",
+ ]
+
+ deps = [ "../../system_wrappers" ]
}
+# TODO(holmer): Some files below has been commented out since libvpx is still
+# missing a BUILD.gn file.
source_set("webrtc_vp8") {
- # TODO(stefan): Implement.
+ sources = [
+# "codecs/vp8/default_temporal_layers.cc",
+# "codecs/vp8/default_temporal_layers.h",
+# "codecs/vp8/realtime_temporal_layers.cc",
+# "codecs/vp8/reference_picture_selection.cc",
+# "codecs/vp8/reference_picture_selection.h",
+ "codecs/vp8/include/vp8.h",
+ "codecs/vp8/include/vp8_common_types.h",
+ "codecs/vp8/temporal_layers.h",
+ "codecs/vp8/vp8_factory.cc",
+# "codecs/vp8/vp8_impl.cc",
+# "codecs/vp8/vp8_impl.h",
+ ]
+ deps = [
+ ":video_coding_utility",
+ "../../common_video",
+ "../../system_wrappers",
+ ]
+# if (build_libvpx) {
+# deps += [
+# "//third_party/libvpx",
+# ]
+# }
}
diff --git a/modules/video_coding/main/interface/video_coding_defines.h b/modules/video_coding/main/interface/video_coding_defines.h
index fab91afd..c5f93cb1 100644
--- a/modules/video_coding/main/interface/video_coding_defines.h
+++ b/modules/video_coding/main/interface/video_coding_defines.h
@@ -40,6 +40,7 @@ namespace webrtc {
#define VCM_ULPFEC_PAYLOAD_TYPE 97
#define VCM_VP8_PAYLOAD_TYPE 100
#define VCM_I420_PAYLOAD_TYPE 124
+#define VCM_H264_PAYLOAD_TYPE 127
enum VCMVideoProtection {
kProtectionNack, // Both send-side and receive-side
diff --git a/modules/video_coding/main/source/codec_database.cc b/modules/video_coding/main/source/codec_database.cc
index e7a9d91b..e5013689 100644
--- a/modules/video_coding/main/source/codec_database.cc
+++ b/modules/video_coding/main/source/codec_database.cc
@@ -102,6 +102,30 @@ bool VCMCodecDataBase::Codec(int list_id,
return true;
}
#endif
+#ifdef VIDEOCODEC_H264
+ case VCM_H264_IDX: {
+ strncpy(settings->plName, "H264", 5);
+ settings->codecType = kVideoCodecH264;
+ // 96 to 127 dynamic payload types for video codecs.
+ settings->plType = VCM_H264_PAYLOAD_TYPE;
+ settings->startBitrate = 100;
+ settings->minBitrate = VCM_MIN_BITRATE;
+ settings->maxBitrate = 0;
+ settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+ settings->width = VCM_DEFAULT_CODEC_WIDTH;
+ settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+ settings->numberOfSimulcastStreams = 0;
+ settings->qpMax = 56;
+ settings->codecSpecific.H264.profile = kProfileBase;
+ settings->codecSpecific.H264.frameDroppingOn = true;
+ settings->codecSpecific.H264.keyFrameInterval = 3000;
+ settings->codecSpecific.H264.spsData = NULL;
+ settings->codecSpecific.H264.spsLen = 0;
+ settings->codecSpecific.H264.ppsData = NULL;
+ settings->codecSpecific.H264.ppsLen = 0;
+ return true;
+ }
+#endif
#ifdef VIDEOCODEC_I420
case VCM_I420_IDX: {
strncpy(settings->plName, "I420", 5);
@@ -316,8 +340,14 @@ bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
case kVideoCodecVP8:
if (memcmp(&new_send_codec.codecSpecific.VP8,
&send_codec_.codecSpecific.VP8,
- sizeof(new_send_codec.codecSpecific.VP8)) !=
- 0) {
+ sizeof(new_send_codec.codecSpecific.VP8)) != 0) {
+ return true;
+ }
+ break;
+ case kVideoCodecH264:
+ if (memcmp(&new_send_codec.codecSpecific.H264,
+ &send_codec_.codecSpecific.H264,
+ sizeof(new_send_codec.codecSpecific.H264)) != 0) {
return true;
}
break;
@@ -619,6 +649,7 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
return new VCMGenericDecoder(*(new I420Decoder));
#endif
default:
+ LOG(LS_WARNING) << "No internal decoder of this type exists.";
return NULL;
}
}
diff --git a/modules/video_coding/main/source/internal_defines.h b/modules/video_coding/main/source/internal_defines.h
index efc6d8dd..ef42c628 100644
--- a/modules/video_coding/main/source/internal_defines.h
+++ b/modules/video_coding/main/source/internal_defines.h
@@ -35,16 +35,21 @@ inline uint32_t MaskWord64ToUWord32(int64_t w64)
// Helper macros for creating the static codec list
#define VCM_NO_CODEC_IDX -1
#ifdef VIDEOCODEC_VP8
- #define VCM_VP8_IDX VCM_NO_CODEC_IDX + 1
+ #define VCM_VP8_IDX (VCM_NO_CODEC_IDX + 1)
#else
#define VCM_VP8_IDX VCM_NO_CODEC_IDX
#endif
+#ifdef VIDEOCODEC_H264
+ #define VCM_H264_IDX (VCM_VP8_IDX + 1)
+#else
+ #define VCM_H264_IDX VCM_VP8_IDX
+#endif
#ifdef VIDEOCODEC_I420
- #define VCM_I420_IDX VCM_VP8_IDX + 1
+ #define VCM_I420_IDX (VCM_H264_IDX + 1)
#else
- #define VCM_I420_IDX VCM_VP8_IDX
+ #define VCM_I420_IDX VCM_H264_IDX
#endif
-#define VCM_NUM_VIDEO_CODECS_AVAILABLE VCM_I420_IDX + 1
+#define VCM_NUM_VIDEO_CODECS_AVAILABLE (VCM_I420_IDX + 1)
#define VCM_NO_RECEIVER_ID 0
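
Note: the codec index macros above are now wrapped in parentheses. The old, unparenthesized form expands incorrectly as soon as the macro is used inside a larger expression; a tiny standalone illustration (not WebRTC code, using C++11 static_assert):

    #define LAST_IDX 2
    #define COUNT_OLD  LAST_IDX + 1    // old, unparenthesized style
    #define COUNT_NEW (LAST_IDX + 1)   // the form used above

    // 2 * COUNT_OLD expands to 2 * 2 + 1 == 5, silently dropping one slot;
    // 2 * COUNT_NEW expands to 2 * (2 + 1) == 6 as intended.
    static_assert(2 * COUNT_OLD == 5, "precedence bug with the old form");
    static_assert(2 * COUNT_NEW == 6, "parenthesized form scales correctly");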
diff --git a/modules/video_coding/main/source/video_coding_impl.h b/modules/video_coding/main/source/video_coding_impl.h
index bf0bc790..816552fe 100644
--- a/modules/video_coding/main/source/video_coding_impl.h
+++ b/modules/video_coding/main/source/video_coding_impl.h
@@ -25,6 +25,7 @@
#include "webrtc/modules/video_coding/main/source/timing.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
@@ -199,21 +200,25 @@ class VideoReceiver {
// in any frame
};
- Clock* clock_;
+ Clock* const clock_;
scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
CriticalSectionWrapper* _receiveCritSect;
- bool _receiverInited;
+ bool _receiverInited GUARDED_BY(_receiveCritSect);
VCMTiming _timing;
VCMTiming _dualTiming;
VCMReceiver _receiver;
VCMReceiver _dualReceiver;
VCMDecodedFrameCallback _decodedFrameCallback;
VCMDecodedFrameCallback _dualDecodedFrameCallback;
- VCMFrameTypeCallback* _frameTypeCallback;
- VCMReceiveStatisticsCallback* _receiveStatsCallback;
- VCMDecoderTimingCallback* _decoderTimingCallback;
- VCMPacketRequestCallback* _packetRequestCallback;
- VCMRenderBufferSizeCallback* render_buffer_callback_;
+ VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_sect_);
+ VCMReceiveStatisticsCallback* _receiveStatsCallback
+ GUARDED_BY(process_crit_sect_);
+ VCMDecoderTimingCallback* _decoderTimingCallback
+ GUARDED_BY(process_crit_sect_);
+ VCMPacketRequestCallback* _packetRequestCallback
+ GUARDED_BY(process_crit_sect_);
+ VCMRenderBufferSizeCallback* render_buffer_callback_
+ GUARDED_BY(process_crit_sect_);
VCMGenericDecoder* _decoder;
VCMGenericDecoder* _dualDecoder;
#ifdef DEBUG_DECODER_BIT_STREAM
@@ -221,9 +226,9 @@ class VideoReceiver {
#endif
VCMFrameBuffer _frameFromFile;
VCMKeyRequestMode _keyRequestMode;
- bool _scheduleKeyRequest;
- size_t max_nack_list_size_;
- EncodedImageCallback* pre_decode_image_callback_;
+ bool _scheduleKeyRequest GUARDED_BY(process_crit_sect_);
+ size_t max_nack_list_size_ GUARDED_BY(process_crit_sect_);
+ EncodedImageCallback* pre_decode_image_callback_ GUARDED_BY(_receiveCritSect);
VCMCodecDataBase _codecDataBase;
VCMProcessTimer _receiveStatsTimer;
diff --git a/modules/video_coding/main/source/video_receiver.cc b/modules/video_coding/main/source/video_receiver.cc
index 5bc1c90f..0b561249 100644
--- a/modules/video_coding/main/source/video_receiver.cc
+++ b/modules/video_coding/main/source/video_receiver.cc
@@ -271,8 +271,6 @@ int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
// Initialize receiver, resets codec database etc
int32_t VideoReceiver::InitializeReceiver() {
- CriticalSectionScoped receive_cs(_receiveCritSect);
- CriticalSectionScoped process_cs(process_crit_sect_.get());
int32_t ret = _receiver.Initialize();
if (ret < 0) {
return ret;
@@ -285,15 +283,22 @@ int32_t VideoReceiver::InitializeReceiver() {
_codecDataBase.ResetReceiver();
_timing.Reset();
- _decoder = NULL;
- _decodedFrameCallback.SetUserReceiveCallback(NULL);
- _receiverInited = true;
- _frameTypeCallback = NULL;
- _receiveStatsCallback = NULL;
- _decoderTimingCallback = NULL;
- _packetRequestCallback = NULL;
- _keyRequestMode = kKeyOnError;
- _scheduleKeyRequest = false;
+ {
+ CriticalSectionScoped receive_cs(_receiveCritSect);
+ _receiverInited = true;
+ }
+
+ {
+ CriticalSectionScoped process_cs(process_crit_sect_.get());
+ _decoder = NULL;
+ _decodedFrameCallback.SetUserReceiveCallback(NULL);
+ _frameTypeCallback = NULL;
+ _receiveStatsCallback = NULL;
+ _decoderTimingCallback = NULL;
+ _packetRequestCallback = NULL;
+ _keyRequestMode = kKeyOnError;
+ _scheduleKeyRequest = false;
+ }
return VCM_OK;
}
@@ -781,7 +786,6 @@ void VideoReceiver::SetNackSettings(size_t max_nack_list_size,
int max_packet_age_to_nack,
int max_incomplete_time_ms) {
if (max_nack_list_size != 0) {
- CriticalSectionScoped receive_cs(_receiveCritSect);
CriticalSectionScoped process_cs(process_crit_sect_.get());
max_nack_list_size_ = max_nack_list_size;
}
diff --git a/modules/video_coding/main/test/pcap_file_reader.cc b/modules/video_coding/main/test/pcap_file_reader.cc
index 3d4e2659..68c85665 100644
--- a/modules/video_coding/main/test/pcap_file_reader.cc
+++ b/modules/video_coding/main/test/pcap_file_reader.cc
@@ -268,8 +268,7 @@ class PcapFileReaderImpl : public RtpPacketSourceInterface {
}
TRY(Read(read_buffer_, marker.payload_length));
- ModuleRTPUtility::RTPHeaderParser rtp_parser(read_buffer_,
- marker.payload_length);
+ RtpUtility::RtpHeaderParser rtp_parser(read_buffer_, marker.payload_length);
if (rtp_parser.RTCP()) {
rtp_parser.ParseRtcp(&marker.rtp_header);
packets_.push_back(marker);
diff --git a/modules/video_coding/main/test/pcap_file_reader_unittest.cc b/modules/video_coding/main/test/pcap_file_reader_unittest.cc
index 18100714..c6f1d511 100644
--- a/modules/video_coding/main/test/pcap_file_reader_unittest.cc
+++ b/modules/video_coding/main/test/pcap_file_reader_unittest.cc
@@ -55,7 +55,7 @@ class TestPcapFileReader : public ::testing::Test {
EXPECT_GE(kBufferSize, length);
length = kBufferSize;
- ModuleRTPUtility::RTPHeaderParser rtp_header_parser(data, length);
+ RtpUtility::RtpHeaderParser rtp_header_parser(data, length);
webrtc::RTPHeader header;
if (!rtp_header_parser.RTCP() && rtp_header_parser.Parse(header, NULL)) {
pps[header.ssrc]++;
diff --git a/modules/video_processing/BUILD.gn b/modules/video_processing/BUILD.gn
index 40171caf..724a9dcb 100644
--- a/modules/video_processing/BUILD.gn
+++ b/modules/video_processing/BUILD.gn
@@ -8,6 +8,51 @@
import("../../build/webrtc.gni")
+build_video_processing_sse2 = cpu_arch == "x86" || cpu_arch == "x64"
+
source_set("video_processing") {
- # TODO(stefan): Implement.
+ sources = [
+ "main/interface/video_processing.h",
+ "main/interface/video_processing_defines.h",
+ "main/source/brighten.cc",
+ "main/source/brighten.h",
+ "main/source/brightness_detection.cc",
+ "main/source/brightness_detection.h",
+ "main/source/color_enhancement.cc",
+ "main/source/color_enhancement.h",
+ "main/source/color_enhancement_private.h",
+ "main/source/content_analysis.cc",
+ "main/source/content_analysis.h",
+ "main/source/deflickering.cc",
+ "main/source/deflickering.h",
+ "main/source/denoising.cc",
+ "main/source/denoising.h",
+ "main/source/frame_preprocessor.cc",
+ "main/source/frame_preprocessor.h",
+ "main/source/spatial_resampler.cc",
+ "main/source/spatial_resampler.h",
+ "main/source/video_decimator.cc",
+ "main/source/video_decimator.h",
+ "main/source/video_processing_impl.cc",
+ "main/source/video_processing_impl.h",
+ ]
+
+ deps = [
+ "../../common_audio",
+ "../../common_video",
+ "../../modules/utility",
+ "../../system_wrappers",
+ ]
+ if (build_video_processing_sse2) {
+ deps += [ ":video_processing_sse2" ]
+ }
+}
+
+if (build_video_processing_sse2) {
+ source_set("video_processing_sse2") {
+ sources = [ "main/source/content_analysis_sse2.cc" ]
+ if (is_posix) {
+ cflags = [ "-msse2" ]
+ }
+ }
}
diff --git a/system_wrappers/source/condition_variable_unittest.cc b/system_wrappers/source/condition_variable_unittest.cc
index 0d287b71..b5c1c360 100644
--- a/system_wrappers/source/condition_variable_unittest.cc
+++ b/system_wrappers/source/condition_variable_unittest.cc
@@ -14,13 +14,11 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/source/unittest_utilities.h"
namespace webrtc {
namespace {
-const int kLogTrace = false; // Set to true to enable debug logging to stdout.
const int kLongWaitMs = 100 * 1000; // A long time in testing terms
const int kShortWaitMs = 2 * 1000; // Long enough for process switches to happen
@@ -143,9 +141,7 @@ bool WaitingRunFunction(void* obj) {
class CondVarTest : public ::testing::Test {
public:
- CondVarTest()
- : trace_(kLogTrace) {
- }
+ CondVarTest() {}
virtual void SetUp() {
thread_ = ThreadWrapper::CreateThread(&WaitingRunFunction,
@@ -171,7 +167,6 @@ class CondVarTest : public ::testing::Test {
Baton baton_;
private:
- ScopedTracing trace_;
ThreadWrapper* thread_;
};
diff --git a/system_wrappers/source/critical_section_unittest.cc b/system_wrappers/source/critical_section_unittest.cc
index 5c416b2d..9b88e6b3 100644
--- a/system_wrappers/source/critical_section_unittest.cc
+++ b/system_wrappers/source/critical_section_unittest.cc
@@ -14,14 +14,11 @@
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/source/unittest_utilities.h"
namespace webrtc {
namespace {
-const bool kLogTrace = false; // Set to true to enable debug logging to stdout.
-
// Cause a process switch. Needed to avoid depending on
// busy-wait in tests.
static void SwitchProcess() {
@@ -54,8 +51,7 @@ private:
class CritSectTest : public ::testing::Test {
public:
- CritSectTest() : trace_(kLogTrace) {
- }
+ CritSectTest() {}
// Waits a number of cycles for the count to reach a given value.
// Returns true if the target is reached or passed.
@@ -70,9 +66,6 @@ public:
}
return (count->Count() >= target);
}
-
-private:
- ScopedTracing trace_;
};
bool LockUnlockThenStopRunFunction(void* obj) {
diff --git a/system_wrappers/source/logging_unittest.cc b/system_wrappers/source/logging_unittest.cc
index 19f13940..6e45c5c6 100644
--- a/system_wrappers/source/logging_unittest.cc
+++ b/system_wrappers/source/logging_unittest.cc
@@ -46,22 +46,19 @@ class LoggingTest : public ::testing::Test, public TraceCallback {
void SetUp() {
Trace::CreateTrace();
Trace::SetTraceCallback(this);
- // Reduce the chance that spurious traces will ruin the test.
- Trace::set_level_filter(kTraceWarning | kTraceError);
}
void TearDown() {
- CriticalSectionScoped cs(crit_.get());
Trace::SetTraceCallback(NULL);
Trace::ReturnTrace();
+ CriticalSectionScoped cs(crit_.get());
ASSERT_EQ(kTraceNone, level_) << "Print() was not called";
}
scoped_ptr<CriticalSectionWrapper> crit_;
scoped_ptr<ConditionVariableWrapper> cv_;
- TraceLevel level_;
- int length_;
- std::ostringstream expected_log_;
+ TraceLevel level_ GUARDED_BY(crit_);
+ std::ostringstream expected_log_ GUARDED_BY(crit_);
};
TEST_F(LoggingTest, LogStream) {
diff --git a/system_wrappers/source/system_wrappers_tests.gyp b/system_wrappers/source/system_wrappers_tests.gyp
index f2f61566..3d08c0d0 100644
--- a/system_wrappers/source/system_wrappers_tests.gyp
+++ b/system_wrappers/source/system_wrappers_tests.gyp
@@ -35,7 +35,6 @@
'stl_util_unittest.cc',
'thread_unittest.cc',
'thread_posix_unittest.cc',
- 'unittest_utilities_unittest.cc',
],
'conditions': [
['enable_data_logging==1', {
diff --git a/system_wrappers/source/unittest_utilities.h b/system_wrappers/source/unittest_utilities.h
deleted file mode 100644
index b32308e6..00000000
--- a/system_wrappers/source/unittest_utilities.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_UNITTEST_UTILITIES_H_
-#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_UNITTEST_UTILITIES_H_
-
-// This file contains utilities that make it simpler to write unittests
-// that are appropriate for the system_wrappers classes.
-
-#include <stdio.h>
-#include <string.h>
-
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-class TestTraceCallback : public TraceCallback {
- public:
- virtual void Print(TraceLevel level, const char* msg, int length) {
- if (msg) {
- char* cmd_print = new char[length+1];
- memcpy(cmd_print, msg, length);
- cmd_print[length] = '\0';
- printf("%s\n", cmd_print);
- fflush(stdout);
- delete[] cmd_print;
- }
- }
-};
-
-// A class that turns on tracing to stdout at the beginning of the test,
-// and turns it off once the test is finished.
-// Intended usage:
-// class SomeTest : public ::testing::Test {
-// protected:
-// SomeTest()
-// : trace_(false) {} // Change to true to turn on tracing.
-// private:
-// ScopedTracing trace_;
-// }
-class ScopedTracing {
- public:
- explicit ScopedTracing(bool logOn) {
- logging_ = logOn;
- StartTrace();
- }
-
- ~ScopedTracing() {
- StopTrace();
- }
-
- private:
- void StartTrace() {
- if (logging_) {
- Trace::CreateTrace();
- Trace::set_level_filter(webrtc::kTraceAll);
- Trace::SetTraceCallback(&trace_);
- }
- }
-
- void StopTrace() {
- if (logging_) {
- Trace::SetTraceCallback(NULL);
- Trace::ReturnTrace();
- }
- }
-
- private:
- bool logging_;
- TestTraceCallback trace_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_UNITTEST_UTILITIES_H_
diff --git a/system_wrappers/source/unittest_utilities_unittest.cc b/system_wrappers/source/unittest_utilities_unittest.cc
deleted file mode 100644
index be6647b7..00000000
--- a/system_wrappers/source/unittest_utilities_unittest.cc
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/source/unittest_utilities.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-// These tests merely check that the code compiles and that no
-// fatal accidents happen when logging.
-TEST(UnittestUtilities, TraceOn) {
- ScopedTracing trace(true);
- WEBRTC_TRACE(kTraceInfo, kTraceUtility, 0, "Log line that should appear");
- // TODO(hta): Verify that output appears.
- // Note - output is written on another thread, so can take time to appear.
-}
-
-TEST(UnittestUtilities, TraceOff) {
- ScopedTracing trace(false);
- WEBRTC_TRACE(kTraceInfo, kTraceUtility, 0,
- "Log line that should not appear");
- // TODO(hta): Verify that no output appears.
-}
-
-} // namespace webrtc
diff --git a/test/OWNERS b/test/OWNERS
index 1532e3fd..98a0d4d1 100644
--- a/test/OWNERS
+++ b/test/OWNERS
@@ -1,5 +1,6 @@
-phoglund@webrtc.org
kjellander@webrtc.org
+pbos@webrtc.org
+phoglund@webrtc.org
per-file *.isolate=kjellander@webrtc.org
diff --git a/test/call_test.cc b/test/call_test.cc
index 5d8b8e67..16dad719 100644
--- a/test/call_test.cc
+++ b/test/call_test.cc
@@ -15,8 +15,9 @@ namespace webrtc {
namespace test {
CallTest::CallTest()
- : send_stream_(NULL),
- fake_encoder_(Clock::GetRealTimeClock()) {
+ : clock_(Clock::GetRealTimeClock()),
+ send_stream_(NULL),
+ fake_encoder_(clock_) {
}
CallTest::~CallTest() {
}
@@ -85,7 +86,7 @@ void CallTest::CreateReceiverCall(const Call::Config& config) {
void CallTest::CreateSendConfig(size_t num_streams) {
assert(num_streams <= kNumSsrcs);
- send_config_ = sender_call_->GetDefaultSendConfig();
+ send_config_ = VideoSendStream::Config();
send_config_.encoder_settings.encoder = &fake_encoder_;
send_config_.encoder_settings.payload_name = "FAKE";
send_config_.encoder_settings.payload_type = kFakeSendPayloadType;
@@ -97,7 +98,7 @@ void CallTest::CreateSendConfig(size_t num_streams) {
void CallTest::CreateMatchingReceiveConfigs() {
assert(!send_config_.rtp.ssrcs.empty());
assert(receive_configs_.empty());
- VideoReceiveStream::Config config = receiver_call_->GetDefaultReceiveConfig();
+ VideoReceiveStream::Config config;
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config_.encoder_settings);
config.codecs.push_back(codec);
@@ -121,7 +122,7 @@ void CallTest::CreateFrameGeneratorCapturer() {
stream.width,
stream.height,
stream.max_framerate,
- Clock::GetRealTimeClock()));
+ clock_));
}
void CallTest::CreateStreams() {
assert(send_stream_ == NULL);
@@ -150,7 +151,8 @@ const unsigned int CallTest::kLongTimeoutMs = 120 * 1000;
const uint8_t CallTest::kSendPayloadType = 100;
const uint8_t CallTest::kFakeSendPayloadType = 125;
const uint8_t CallTest::kSendRtxPayloadType = 98;
-const uint32_t CallTest::kSendRtxSsrc = 0xBADCAFE;
+const uint32_t CallTest::kSendRtxSsrcs[kNumSsrcs] = {0xBADCAFD, 0xBADCAFE,
+ 0xBADCAFF};
const uint32_t CallTest::kSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE, 0xC0FFEF};
const uint32_t CallTest::kReceiverLocalSsrc = 0x123456;
const int CallTest::kNackRtpHistoryMs = 1000;
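Across this change the test fixtures stop asking the Call for default configs (GetDefaultSendConfig()/GetDefaultReceiveConfig() are deleted from video/call.cc further down) and simply default-construct them. A condensed sketch of the new pattern, using only fields that already appear in CreateSendConfig() above:

    send_config_ = VideoSendStream::Config();
    send_config_.encoder_settings.encoder = &fake_encoder_;
    send_config_.encoder_settings.payload_name = "FAKE";
    send_config_.encoder_settings.payload_type = kFakeSendPayloadType;

Note that the removed Call::GetDefaultReceiveConfig() used to set rtp.remb = true, so receive configs built this way presumably start without REMB unless a test enables it explicitly.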
diff --git a/test/call_test.h b/test/call_test.h
index 37a883d6..e24d8359 100644
--- a/test/call_test.h
+++ b/test/call_test.h
@@ -35,7 +35,7 @@ class CallTest : public ::testing::Test {
static const uint8_t kSendPayloadType;
static const uint8_t kSendRtxPayloadType;
static const uint8_t kFakeSendPayloadType;
- static const uint32_t kSendRtxSsrc;
+ static const uint32_t kSendRtxSsrcs[kNumSsrcs];
static const uint32_t kSendSsrcs[kNumSsrcs];
static const uint32_t kReceiverLocalSsrc;
static const int kNackRtpHistoryMs;
@@ -58,6 +58,8 @@ class CallTest : public ::testing::Test {
void Stop();
void DestroyStreams();
+ Clock* const clock_;
+
scoped_ptr<Call> sender_call_;
VideoSendStream::Config send_config_;
std::vector<VideoStream> video_streams_;
diff --git a/test/encoder_settings.cc b/test/encoder_settings.cc
index 5193be65..9842d1ed 100644
--- a/test/encoder_settings.cc
+++ b/test/encoder_settings.cc
@@ -59,9 +59,13 @@ VideoCodec CreateDecoderVideoCodec(
codec.plType = encoder_settings.payload_type;
strcpy(codec.plName, encoder_settings.payload_name.c_str());
- codec.codecType =
- (encoder_settings.payload_name == "VP8" ? kVideoCodecVP8
- : kVideoCodecGeneric);
+ if (encoder_settings.payload_name == "VP8") {
+ codec.codecType = kVideoCodecVP8;
+ } else if (encoder_settings.payload_name == "H264") {
+ codec.codecType = kVideoCodecH264;
+ } else {
+ codec.codecType = kVideoCodecGeneric;
+ }
if (codec.codecType == kVideoCodecVP8) {
codec.codecSpecific.VP8.resilience = kResilientStream;
@@ -73,6 +77,12 @@ VideoCodec CreateDecoderVideoCodec(
codec.codecSpecific.VP8.keyFrameInterval = 3000;
}
+ if (codec.codecType == kVideoCodecH264) {
+ codec.codecSpecific.H264.profile = kProfileBase;
+ codec.codecSpecific.H264.frameDroppingOn = true;
+ codec.codecSpecific.H264.keyFrameInterval = 3000;
+ }
+
codec.width = 320;
codec.height = 180;
codec.startBitrate = codec.minBitrate = codec.maxBitrate = 300;
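With the new branch, a receive-side VideoCodec for H264 can be derived the same way the tests already do for VP8. A minimal usage sketch (the payload type value is arbitrary and purely illustrative):

    VideoSendStream::Config send_config;
    send_config.encoder_settings.payload_name = "H264";
    send_config.encoder_settings.payload_type = 127;  // hypothetical dynamic PT
    VideoCodec codec =
        test::CreateDecoderVideoCodec(send_config.encoder_settings);
    // codec.codecType == kVideoCodecH264, with the H264 defaults set above
    // (kProfileBase, frame dropping on, keyFrameInterval 3000); any payload
    // name other than "VP8"/"H264" falls back to kVideoCodecGeneric.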
diff --git a/test/rtp_rtcp_observer.h b/test/rtp_rtcp_observer.h
index 670a29de..11531b3b 100644
--- a/test/rtp_rtcp_observer.h
+++ b/test/rtp_rtcp_observer.h
@@ -129,7 +129,7 @@ class RtpRtcpObserver {
private:
virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- EXPECT_FALSE(RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)));
+ EXPECT_FALSE(RtpHeaderParser::IsRtcp(packet, length));
Action action;
{
CriticalSectionScoped lock(crit_);
@@ -146,7 +146,7 @@ class RtpRtcpObserver {
}
virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
- EXPECT_TRUE(RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)));
+ EXPECT_TRUE(RtpHeaderParser::IsRtcp(packet, length));
Action action;
{
CriticalSectionScoped lock(crit_);
diff --git a/video/OWNERS b/video/OWNERS
index 78a39675..b5f9aeba 100644
--- a/video/OWNERS
+++ b/video/OWNERS
@@ -1,7 +1,6 @@
mflodman@webrtc.org
stefan@webrtc.org
-wu@webrtc.org
-mallinath@webrtc.org
+pbos@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
diff --git a/video/bitrate_estimator_tests.cc b/video/bitrate_estimator_tests.cc
index d21de179..1ec95c3b 100644
--- a/video/bitrate_estimator_tests.cc
+++ b/video/bitrate_estimator_tests.cc
@@ -27,6 +27,92 @@
#include "webrtc/test/frame_generator_capturer.h"
namespace webrtc {
+namespace {
+// Note: consider writing tests that don't depend on the trace system instead
+// of reusing this class.
+class TraceObserver {
+ public:
+ TraceObserver() {
+ Trace::set_level_filter(kTraceTerseInfo);
+
+ Trace::CreateTrace();
+ Trace::SetTraceCallback(&callback_);
+
+ // Call the webrtc trace once here to initialize the tracer up front;
+ // leaving it to be initialized lazily by multiple threads (i.e. the threads
+ // spawned by test::DirectTransport members in BitrateEstimatorTest) would
+ // trigger a data race.
+ WEBRTC_TRACE(kTraceStateInfo,
+ kTraceUtility,
+ -1,
+ "Instantiate without data races.");
+ }
+
+ ~TraceObserver() {
+ Trace::SetTraceCallback(NULL);
+ Trace::ReturnTrace();
+ }
+
+ void PushExpectedLogLine(const std::string& expected_log_line) {
+ callback_.PushExpectedLogLine(expected_log_line);
+ }
+
+ EventTypeWrapper Wait() {
+ return callback_.Wait();
+ }
+
+ private:
+ class Callback : public TraceCallback {
+ public:
+ Callback()
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ done_(EventWrapper::Create()) {}
+
+ virtual void Print(TraceLevel level,
+ const char* message,
+ int length) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ std::string msg(message);
+ if (msg.find("BitrateEstimator") != std::string::npos) {
+ received_log_lines_.push_back(msg);
+ }
+ int num_popped = 0;
+ while (!received_log_lines_.empty() && !expected_log_lines_.empty()) {
+ std::string a = received_log_lines_.front();
+ std::string b = expected_log_lines_.front();
+ received_log_lines_.pop_front();
+ expected_log_lines_.pop_front();
+ num_popped++;
+ EXPECT_TRUE(a.find(b) != std::string::npos);
+ }
+ if (expected_log_lines_.size() <= 0) {
+ if (num_popped > 0) {
+ done_->Set();
+ }
+ return;
+ }
+ }
+
+ EventTypeWrapper Wait() {
+ return done_->Wait(test::CallTest::kDefaultTimeoutMs);
+ }
+
+ void PushExpectedLogLine(const std::string& expected_log_line) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ expected_log_lines_.push_back(expected_log_line);
+ }
+
+ private:
+ typedef std::list<std::string> Strings;
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ Strings received_log_lines_ GUARDED_BY(crit_sect_);
+ Strings expected_log_lines_ GUARDED_BY(crit_sect_);
+ scoped_ptr<EventWrapper> done_;
+
+ };
+
+ Callback callback_;
+};
+} // namespace
static const int kTOFExtensionId = 4;
static const int kASTExtensionId = 5;
@@ -48,11 +134,6 @@ class BitrateEstimatorTest : public test::CallTest {
}
virtual void SetUp() {
- Trace::CreateTrace();
- Trace::SetTraceCallback(&receiver_trace_);
- // Reduce the chance that spurious traces will ruin the test.
- Trace::set_level_filter(kTraceTerseInfo);
-
Call::Config receiver_call_config(&receive_transport_);
receiver_call_.reset(Call::Create(receiver_call_config));
@@ -62,7 +143,7 @@ class BitrateEstimatorTest : public test::CallTest {
send_transport_.SetReceiver(receiver_call_->Receiver());
receive_transport_.SetReceiver(sender_call_->Receiver());
- send_config_ = sender_call_->GetDefaultSendConfig();
+ send_config_ = VideoSendStream::Config();
send_config_.rtp.ssrcs.push_back(kSendSsrcs[0]);
// Encoders will be set separately per stream.
send_config_.encoder_settings.encoder = NULL;
@@ -70,7 +151,7 @@ class BitrateEstimatorTest : public test::CallTest {
send_config_.encoder_settings.payload_type = kFakeSendPayloadType;
video_streams_ = test::CreateVideoStreams(1);
- receive_config_ = receiver_call_->GetDefaultReceiveConfig();
+ receive_config_ = VideoReceiveStream::Config();
assert(receive_config_.codecs.empty());
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config_.encoder_settings);
@@ -97,63 +178,11 @@ class BitrateEstimatorTest : public test::CallTest {
}
receiver_call_.reset();
-
- Trace::SetTraceCallback(NULL);
- Trace::ReturnTrace();
}
protected:
friend class Stream;
- class TraceObserver : public TraceCallback {
- public:
- TraceObserver()
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- received_log_lines_(),
- expected_log_lines_(),
- done_(EventWrapper::Create()) {
- }
-
- void PushExpectedLogLine(const std::string& expected_log_line) {
- CriticalSectionScoped lock(crit_sect_.get());
- expected_log_lines_.push_back(expected_log_line);
- }
-
- virtual void Print(TraceLevel level,
- const char* message,
- int length) OVERRIDE {
- CriticalSectionScoped lock(crit_sect_.get());
- std::string msg(message);
- if (msg.find("BitrateEstimator") != std::string::npos) {
- received_log_lines_.push_back(msg);
- }
- int num_popped = 0;
- while (!received_log_lines_.empty() && !expected_log_lines_.empty()) {
- std::string a = received_log_lines_.front();
- std::string b = expected_log_lines_.front();
- received_log_lines_.pop_front();
- expected_log_lines_.pop_front();
- num_popped++;
- EXPECT_TRUE(a.find(b) != std::string::npos);
- }
- if (expected_log_lines_.size() <= 0) {
- if (num_popped > 0) {
- done_->Set();
- }
- return;
- }
- }
-
- EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); }
-
- private:
- typedef std::list<std::string> Strings;
- const scoped_ptr<CriticalSectionWrapper> crit_sect_;
- Strings received_log_lines_ GUARDED_BY(crit_sect_);
- Strings expected_log_lines_ GUARDED_BY(crit_sect_);
- scoped_ptr<EventWrapper> done_;
- };
-
class Stream {
public:
explicit Stream(BitrateEstimatorTest* test)
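The TraceObserver moved into the anonymous namespace keeps the same push-then-wait flow the fixture used before. A minimal sketch of how a test body drives it (the member name and the exact expected substring are assumptions; Print() only records lines containing "BitrateEstimator"):

    TraceObserver receiver_trace_;  // typically a fixture member
    receiver_trace_.PushExpectedLogLine(
        "RemoteBitrateEstimatorFactory: Instantiating.");  // hypothetical line
    // ... create send/receive streams and start the capturer ...
    EXPECT_EQ(kEventSignaled, receiver_trace_.Wait())
        << "Timed out waiting for the expected BitrateEstimator log lines.";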
diff --git a/video/call.cc b/video/call.cc
index 6daa8b07..95e1c7b7 100644
--- a/video/call.cc
+++ b/video/call.cc
@@ -67,8 +67,6 @@ class Call : public webrtc::Call, public PacketReceiver {
virtual PacketReceiver* Receiver() OVERRIDE;
- virtual VideoSendStream::Config GetDefaultSendConfig() OVERRIDE;
-
virtual VideoSendStream* CreateVideoSendStream(
const VideoSendStream::Config& config,
const std::vector<VideoStream>& video_streams,
@@ -77,8 +75,6 @@ class Call : public webrtc::Call, public PacketReceiver {
virtual void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream)
OVERRIDE;
- virtual VideoReceiveStream::Config GetDefaultReceiveConfig() OVERRIDE;
-
virtual VideoReceiveStream* CreateVideoReceiveStream(
const VideoReceiveStream::Config& config) OVERRIDE;
@@ -93,9 +89,7 @@ class Call : public webrtc::Call, public PacketReceiver {
private:
DeliveryStatus DeliverRtcp(const uint8_t* packet, size_t length);
- DeliveryStatus DeliverRtp(const RTPHeader& header,
- const uint8_t* packet,
- size_t length);
+ DeliveryStatus DeliverRtp(const uint8_t* packet, size_t length);
Call::Config config_;
@@ -106,10 +100,10 @@ class Call : public webrtc::Call, public PacketReceiver {
std::map<uint32_t, VideoSendStream*> send_ssrcs_ GUARDED_BY(send_lock_);
scoped_ptr<RWLockWrapper> send_lock_;
- scoped_ptr<RtpHeaderParser> rtp_header_parser_;
-
scoped_ptr<CpuOveruseObserverProxy> overuse_observer_proxy_;
+ VideoSendStream::RtpStateMap suspended_send_ssrcs_;
+
VideoEngine* video_engine_;
ViERTP_RTCP* rtp_rtcp_;
ViECodec* codec_;
@@ -137,7 +131,6 @@ Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
: config_(config),
receive_lock_(RWLockWrapper::CreateRWLock()),
send_lock_(RWLockWrapper::CreateRWLock()),
- rtp_header_parser_(RtpHeaderParser::Create()),
video_engine_(video_engine),
base_channel_id_(-1) {
assert(video_engine != NULL);
@@ -173,11 +166,6 @@ Call::~Call() {
PacketReceiver* Call::Receiver() { return this; }
-VideoSendStream::Config Call::GetDefaultSendConfig() {
- VideoSendStream::Config config;
- return config;
-}
-
VideoSendStream* Call::CreateVideoSendStream(
const VideoSendStream::Config& config,
const std::vector<VideoStream>& video_streams,
@@ -193,6 +181,7 @@ VideoSendStream* Call::CreateVideoSendStream(
config,
video_streams,
encoder_settings,
+ suspended_send_ssrcs_,
base_channel_id_,
config_.start_bitrate_bps != -1 ? config_.start_bitrate_bps
: kDefaultVideoStreamBitrateBps);
@@ -208,31 +197,34 @@ VideoSendStream* Call::CreateVideoSendStream(
void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
assert(send_stream != NULL);
+ send_stream->Stop();
+
VideoSendStream* send_stream_impl = NULL;
{
WriteLockScoped write_lock(*send_lock_);
- for (std::map<uint32_t, VideoSendStream*>::iterator it =
- send_ssrcs_.begin();
- it != send_ssrcs_.end();
- ++it) {
+ std::map<uint32_t, VideoSendStream*>::iterator it = send_ssrcs_.begin();
+ while (it != send_ssrcs_.end()) {
if (it->second == static_cast<VideoSendStream*>(send_stream)) {
send_stream_impl = it->second;
- send_ssrcs_.erase(it);
- break;
+ send_ssrcs_.erase(it++);
+ } else {
+ ++it;
}
}
}
+ VideoSendStream::RtpStateMap rtp_state = send_stream_impl->GetRtpStates();
+
+ for (VideoSendStream::RtpStateMap::iterator it = rtp_state.begin();
+ it != rtp_state.end();
+ ++it) {
+ suspended_send_ssrcs_[it->first] = it->second;
+ }
+
assert(send_stream_impl != NULL);
delete send_stream_impl;
}
-VideoReceiveStream::Config Call::GetDefaultReceiveConfig() {
- VideoReceiveStream::Config config;
- config.rtp.remb = true;
- return config;
-}
-
VideoReceiveStream* Call::CreateVideoReceiveStream(
const VideoReceiveStream::Config& config) {
VideoReceiveStream* receive_stream =
@@ -291,7 +283,7 @@ uint32_t Call::ReceiveBitrateEstimate() {
return 0;
}
-Call::PacketReceiver::DeliveryStatus Call::DeliverRtcp(const uint8_t* packet,
+PacketReceiver::DeliveryStatus Call::DeliverRtcp(const uint8_t* packet,
size_t length) {
// TODO(pbos): Figure out what channel needs it actually.
// Do NOT broadcast! Also make sure it's a valid packet.
@@ -322,32 +314,32 @@ Call::PacketReceiver::DeliveryStatus Call::DeliverRtcp(const uint8_t* packet,
return rtcp_delivered ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
}
-Call::PacketReceiver::DeliveryStatus Call::DeliverRtp(const RTPHeader& header,
- const uint8_t* packet,
- size_t length) {
+PacketReceiver::DeliveryStatus Call::DeliverRtp(const uint8_t* packet,
+ size_t length) {
+ // Minimum RTP header size.
+ if (length < 12)
+ return DELIVERY_PACKET_ERROR;
+
+ const uint8_t* ptr = &packet[8];
+ uint32_t ssrc = ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
+
ReadLockScoped read_lock(*receive_lock_);
std::map<uint32_t, VideoReceiveStream*>::iterator it =
- receive_ssrcs_.find(header.ssrc);
+ receive_ssrcs_.find(ssrc);
if (it == receive_ssrcs_.end())
return DELIVERY_UNKNOWN_SSRC;
- return it->second->DeliverRtp(static_cast<const uint8_t*>(packet), length)
- ? DELIVERY_OK
- : DELIVERY_PACKET_ERROR;
+ return it->second->DeliverRtp(packet, length) ? DELIVERY_OK
+ : DELIVERY_PACKET_ERROR;
}
-Call::PacketReceiver::DeliveryStatus Call::DeliverPacket(const uint8_t* packet,
- size_t length) {
- // TODO(pbos): ExtensionMap if there are extensions.
- if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)))
+PacketReceiver::DeliveryStatus Call::DeliverPacket(const uint8_t* packet,
+ size_t length) {
+ if (RtpHeaderParser::IsRtcp(packet, length))
return DeliverRtcp(packet, length);
- RTPHeader rtp_header;
- if (!rtp_header_parser_->Parse(packet, static_cast<int>(length), &rtp_header))
- return DELIVERY_PACKET_ERROR;
-
- return DeliverRtp(rtp_header, packet, length);
+ return DeliverRtp(packet, length);
}
} // namespace internal
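The new DeliverRtp() fast path reads the SSRC straight out of the fixed 12-byte RTP header (RFC 3550) instead of running a full header parse: bytes 8..11 hold the SSRC in network byte order. A standalone sketch of the same extraction (the helper name is illustrative):

    #include <stddef.h>
    #include <stdint.h>

    // Returns false for packets shorter than the fixed RTP header.
    bool ReadRtpSsrc(const uint8_t* packet, size_t length, uint32_t* ssrc) {
      if (length < 12)
        return false;
      const uint8_t* ptr = &packet[8];
      *ssrc = (static_cast<uint32_t>(ptr[0]) << 24) | (ptr[1] << 16) |
              (ptr[2] << 8) | ptr[3];
      return true;
    }

    // Example: bytes 8..11 == {0x00, 0xC0, 0xFF, 0xEE} gives ssrc == 0xC0FFEE,
    // i.e. kSendSsrcs[1] from the test constants earlier in this change.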
diff --git a/video/call_perf_tests.cc b/video/call_perf_tests.cc
index 18246168..62d2adcb 100644
--- a/video/call_perf_tests.cc
+++ b/video/call_perf_tests.cc
@@ -354,7 +354,7 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
if (!rtp_start_timestamp_set_) {
// Calculate the rtp timestamp offset in order to calculate the real
diff --git a/video/end_to_end_tests.cc b/video/end_to_end_tests.cc
index fa3d3cf4..8946f890 100644
--- a/video/end_to_end_tests.cc
+++ b/video/end_to_end_tests.cc
@@ -58,6 +58,7 @@ class EndToEndTest : public test::CallTest {
void RespectsRtcpMode(newapi::RtcpMode rtcp_mode);
void TestXrReceiverReferenceTimeReport(bool enable_rrtr);
void TestSendsSetSsrcs(size_t num_ssrcs, bool send_single_ssrc_first);
+ void TestRtpStatePreservation(bool use_rtx);
};
TEST_F(EndToEndTest, ReceiverCanBeStartedTwice) {
@@ -253,8 +254,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(
- rtp_parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(rtp_parser_->Parse(packet, length, &header));
// Never drop retransmitted packets.
if (dropped_packets_.find(header.sequenceNumber) !=
@@ -341,7 +341,7 @@ TEST_F(EndToEndTest, DISABLED_CanReceiveFec) {
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE
EXCLUSIVE_LOCKS_REQUIRED(crit_) {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
EXPECT_EQ(kRedPayloadType, header.payloadType);
int encapsulated_payload_type =
@@ -434,7 +434,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool retransmit_over_rtx) {
public:
explicit RetransmissionObserver(bool expect_rtx)
: EndToEndTest(kDefaultTimeoutMs),
- retransmission_ssrc_(expect_rtx ? kSendRtxSsrc : kSendSsrcs[0]),
+ retransmission_ssrc_(expect_rtx ? kSendRtxSsrcs[0] : kSendSsrcs[0]),
retransmission_payload_type_(expect_rtx ? kSendRtxPayloadType
: kFakeSendPayloadType),
marker_bits_observed_(0),
@@ -444,7 +444,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool retransmit_over_rtx) {
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
if (header.timestamp == retransmitted_timestamp_) {
EXPECT_EQ(retransmission_ssrc_, header.ssrc);
@@ -481,10 +481,11 @@ void EndToEndTest::DecodesRetransmittedFrame(bool retransmit_over_rtx) {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].pre_render_callback = this;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- if (retransmission_ssrc_ == kSendRtxSsrc) {
- send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrc);
+ if (retransmission_ssrc_ == kSendRtxSsrcs[0]) {
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
- (*receive_configs)[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrc;
+ (*receive_configs)[0].rtp.rtx[kSendRtxPayloadType].ssrc =
+ kSendRtxSsrcs[0];
(*receive_configs)[0].rtp.rtx[kSendRtxPayloadType].payload_type =
kSendRtxPayloadType;
}
@@ -630,7 +631,7 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
// Drop all retransmitted packets to force a PLI.
if (header.timestamp <= highest_dropped_timestamp_)
@@ -722,7 +723,7 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
private:
virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
size_t length) OVERRIDE {
- if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length))) {
+ if (RtpHeaderParser::IsRtcp(packet, length)) {
return receiver_->DeliverPacket(packet, length);
} else {
DeliveryStatus delivery_status =
@@ -917,7 +918,7 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
int height = codec_settings[i].height;
observers[i] = new VideoOutputObserver(&frame_generators[i], width, height);
- VideoSendStream::Config send_config = sender_call->GetDefaultSendConfig();
+ VideoSendStream::Config send_config;
send_config.rtp.ssrcs.push_back(ssrc);
send_config.encoder_settings.encoder = encoders[i].get();
send_config.encoder_settings.payload_name = "VP8";
@@ -933,8 +934,7 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
sender_call->CreateVideoSendStream(send_config, video_streams, NULL);
send_streams[i]->Start();
- VideoReceiveStream::Config receive_config =
- receiver_call->GetDefaultReceiveConfig();
+ VideoReceiveStream::Config receive_config;
receive_config.renderer = observers[i];
receive_config.rtp.remote_ssrc = ssrc;
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
@@ -1187,7 +1187,7 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
EXPECT_TRUE(valid_ssrcs_[header.ssrc])
<< "Received unknown SSRC: " << header.ssrc;
@@ -1543,4 +1543,251 @@ TEST_F(EndToEndTest, CanSwitchToUseAllSsrcs) {
TestSendsSetSsrcs(kNumSsrcs, true);
}
+TEST_F(EndToEndTest, RedundantPayloadsTransmittedOnAllSsrcs) {
+ class ObserveRedundantPayloads: public test::EndToEndTest {
+ public:
+ ObserveRedundantPayloads()
+ : EndToEndTest(kDefaultTimeoutMs), ssrcs_to_observe_(kNumSsrcs) {
+ for (size_t i = 0; i < kNumSsrcs; ++i) {
+ registered_rtx_ssrc_[kSendRtxSsrcs[i]] = true;
+ }
+ }
+
+ private:
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
+
+ if (!registered_rtx_ssrc_[header.ssrc])
+ return SEND_PACKET;
+
+ EXPECT_LE(static_cast<size_t>(header.headerLength + header.paddingLength),
+ length);
+ const bool packet_is_redundant_payload =
+ static_cast<size_t>(header.headerLength + header.paddingLength) <
+ length;
+
+ if (!packet_is_redundant_payload)
+ return SEND_PACKET;
+
+ if (!observed_redundant_retransmission_[header.ssrc]) {
+ observed_redundant_retransmission_[header.ssrc] = true;
+ if (--ssrcs_to_observe_ == 0)
+ observation_complete_->Set();
+ }
+
+ return SEND_PACKET;
+ }
+
+ virtual size_t GetNumStreams() const OVERRIDE { return kNumSsrcs; }
+
+ virtual void ModifyConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ std::vector<VideoStream>* video_streams) OVERRIDE {
+ // Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
+ for (size_t i = 0; i < video_streams->size(); ++i) {
+ (*video_streams)[i].min_bitrate_bps = 10000;
+ (*video_streams)[i].target_bitrate_bps = 15000;
+ (*video_streams)[i].max_bitrate_bps = 20000;
+ }
+ // Set a min transmit bitrate significantly higher than the max bitrate of
+ // all video streams, forcing padding so that redundant payloads are sent on
+ // all RTX SSRCs.
+ send_config->rtp.min_transmit_bitrate_bps = 100000;
+
+ send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
+ send_config->rtp.rtx.pad_with_redundant_payloads = true;
+
+ for (size_t i = 0; i < kNumSsrcs; ++i)
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+ }
+
+ virtual void PerformTest() OVERRIDE {
+ EXPECT_EQ(kEventSignaled, Wait())
+ << "Timed out while waiting for redundant payloads on all SSRCs.";
+ }
+
+ private:
+ size_t ssrcs_to_observe_;
+ std::map<uint32_t, bool> observed_redundant_retransmission_;
+ std::map<uint32_t, bool> registered_rtx_ssrc_;
+ } test;
+
+ RunBaseTest(&test);
+}
+
+void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
+ static const uint32_t kMaxSequenceNumberGap = 100;
+ static const uint64_t kMaxTimestampGap = kDefaultTimeoutMs * 90;
+ class RtpSequenceObserver : public test::RtpRtcpObserver {
+ public:
+ RtpSequenceObserver(bool use_rtx)
+ : test::RtpRtcpObserver(kDefaultTimeoutMs),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ ssrcs_to_observe_(kNumSsrcs) {
+ for (size_t i = 0; i < kNumSsrcs; ++i) {
+ configured_ssrcs_[kSendSsrcs[i]] = true;
+ if (use_rtx)
+ configured_ssrcs_[kSendRtxSsrcs[i]] = true;
+ }
+ }
+
+ void ResetExpectedSsrcs(size_t num_expected_ssrcs) {
+ CriticalSectionScoped lock(crit_.get());
+ ssrc_observed_.clear();
+ ssrcs_to_observe_ = num_expected_ssrcs;
+ }
+
+ private:
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
+ const uint32_t ssrc = header.ssrc;
+ const uint16_t sequence_number = header.sequenceNumber;
+ const uint32_t timestamp = header.timestamp;
+ const bool only_padding =
+ static_cast<size_t>(header.headerLength + header.paddingLength) ==
+ length;
+
+ EXPECT_TRUE(configured_ssrcs_[ssrc])
+ << "Received SSRC that wasn't configured: " << ssrc;
+
+ std::map<uint32_t, uint16_t>::iterator it =
+ last_observed_sequence_number_.find(header.ssrc);
+ if (it == last_observed_sequence_number_.end()) {
+ last_observed_sequence_number_[ssrc] = sequence_number;
+ last_observed_timestamp_[ssrc] = timestamp;
+ } else {
+ // Verify sequence numbers are reasonably close.
+ uint32_t extended_sequence_number = sequence_number;
+ // Check for roll-over.
+ if (sequence_number < last_observed_sequence_number_[ssrc])
+ extended_sequence_number += 0xFFFFu + 1;
+ EXPECT_LE(
+ extended_sequence_number - last_observed_sequence_number_[ssrc],
+ kMaxSequenceNumberGap)
+ << "Gap in sequence numbers ("
+ << last_observed_sequence_number_[ssrc] << " -> " << sequence_number
+ << ") too large for SSRC: " << ssrc << ".";
+ last_observed_sequence_number_[ssrc] = sequence_number;
+
+ // TODO(pbos): Remove this check if we ever have monotonically
+ // increasing timestamps. Right now padding packets add a delta which
+ // can cause reordering between padding packets and regular packets,
+ // hence we drop padding-only packets to not flake.
+ if (only_padding) {
+ // Verify that timestamps are reasonably close.
+ uint64_t extended_timestamp = timestamp;
+ // Check for roll-over.
+ if (timestamp < last_observed_timestamp_[ssrc])
+ extended_timestamp += static_cast<uint64_t>(0xFFFFFFFFu) + 1;
+ EXPECT_LE(extended_timestamp - last_observed_timestamp_[ssrc],
+ kMaxTimestampGap)
+ << "Gap in timestamps (" << last_observed_timestamp_[ssrc]
+ << " -> " << timestamp << ") too large for SSRC: " << ssrc << ".";
+ }
+ last_observed_timestamp_[ssrc] = timestamp;
+ }
+
+ CriticalSectionScoped lock(crit_.get());
+ // Wait for media packets on all ssrcs.
+ if (!ssrc_observed_[ssrc] && !only_padding) {
+ ssrc_observed_[ssrc] = true;
+ if (--ssrcs_to_observe_ == 0)
+ observation_complete_->Set();
+ }
+
+ return SEND_PACKET;
+ }
+
+ std::map<uint32_t, uint16_t> last_observed_sequence_number_;
+ std::map<uint32_t, uint32_t> last_observed_timestamp_;
+ std::map<uint32_t, bool> configured_ssrcs_;
+
+ scoped_ptr<CriticalSectionWrapper> crit_;
+ size_t ssrcs_to_observe_ GUARDED_BY(crit_);
+ std::map<uint32_t, bool> ssrc_observed_ GUARDED_BY(crit_);
+ } observer(use_rtx);
+
+ CreateCalls(Call::Config(observer.SendTransport()),
+ Call::Config(observer.ReceiveTransport()));
+ observer.SetReceivers(sender_call_->Receiver(), NULL);
+
+ CreateSendConfig(kNumSsrcs);
+
+ if (use_rtx) {
+ for (size_t i = 0; i < kNumSsrcs; ++i) {
+ send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+ }
+ send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ }
+
+ // Lower bitrates so that all streams send initially.
+ for (size_t i = 0; i < video_streams_.size(); ++i) {
+ video_streams_[i].min_bitrate_bps = 10000;
+ video_streams_[i].target_bitrate_bps = 15000;
+ video_streams_[i].max_bitrate_bps = 20000;
+ }
+
+ CreateMatchingReceiveConfigs();
+
+ CreateStreams();
+ CreateFrameGeneratorCapturer();
+
+ Start();
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out waiting for all SSRCs to send packets.";
+
+ // Test stream resetting more than once to make sure that the state isn't
+ // only preserved the first time around (as it could be if, for instance,
+ // std::map::insert were used).
+ for (size_t i = 0; i < 3; ++i) {
+ frame_generator_capturer_->Stop();
+ sender_call_->DestroyVideoSendStream(send_stream_);
+
+ // Re-create VideoSendStream with only one stream.
+ std::vector<VideoStream> one_stream = video_streams_;
+ one_stream.resize(1);
+ send_stream_ =
+ sender_call_->CreateVideoSendStream(send_config_, one_stream, NULL);
+ send_stream_->Start();
+ CreateFrameGeneratorCapturer();
+ frame_generator_capturer_->Start();
+
+ observer.ResetExpectedSsrcs(1);
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out waiting for single RTP packet.";
+
+ // Reconfigure back to use all streams.
+ send_stream_->ReconfigureVideoEncoder(video_streams_, NULL);
+ observer.ResetExpectedSsrcs(kNumSsrcs);
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out waiting for all SSRCs to send packets.";
+
+ // Reconfigure down to one stream.
+ send_stream_->ReconfigureVideoEncoder(one_stream, NULL);
+ observer.ResetExpectedSsrcs(1);
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out waiting for single RTP packet.";
+
+ // Reconfigure back to use all streams.
+ send_stream_->ReconfigureVideoEncoder(video_streams_, NULL);
+ observer.ResetExpectedSsrcs(kNumSsrcs);
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out waiting for all SSRCs to send packets.";
+ }
+
+ observer.StopSending();
+
+ Stop();
+ DestroyStreams();
+}
+
+TEST_F(EndToEndTest, RestartingSendStreamPreservesRtpState) {
+ TestRtpStatePreservation(false);
+}
+
+TEST_F(EndToEndTest, RestartingSendStreamPreservesRtpStatesWithRtx) {
+ TestRtpStatePreservation(true);
+}
+
} // namespace webrtc
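RtpSequenceObserver's gap check has to survive 16-bit sequence-number wrap-around: after a restart the stream resumes its old sequence-number space, so 65530 followed by 5 must count as a forward gap of 11, not a jump backwards. A minimal restatement of that arithmetic (helper name is illustrative):

    #include <stdint.h>

    uint32_t ForwardSequenceGap(uint16_t last, uint16_t current) {
      uint32_t extended = current;
      if (current < last)
        extended += 0xFFFFu + 1;  // Account for 16-bit wrap-around.
      return extended - last;
    }

    // ForwardSequenceGap(65530, 5) == 11, comfortably below
    // kMaxSequenceNumberGap (100), so the EXPECT_LE in OnSendRtp() passes; the
    // 32-bit timestamp check works the same way with 0xFFFFFFFF + 1.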
diff --git a/video/full_stack.cc b/video/full_stack.cc
index e2a8e863..284efe20 100644
--- a/video/full_stack.cc
+++ b/video/full_stack.cc
@@ -104,7 +104,7 @@ class VideoAnalyzer : public PacketReceiver,
size_t length) OVERRIDE {
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
- parser->Parse(packet, static_cast<int>(length), &header);
+ parser->Parse(packet, length, &header);
{
CriticalSectionScoped lock(crit_.get());
recv_times_[header.timestamp - rtp_timestamp_delta_] =
@@ -143,7 +143,7 @@ class VideoAnalyzer : public PacketReceiver,
virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE {
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
- parser->Parse(packet, static_cast<int>(length), &header);
+ parser->Parse(packet, length, &header);
{
CriticalSectionScoped lock(crit_.get());
diff --git a/video/loopback.cc b/video/loopback.cc
index ea65ebb4..614ef47a 100644
--- a/video/loopback.cc
+++ b/video/loopback.cc
@@ -48,6 +48,9 @@ size_t StartBitrate() { return static_cast<size_t>(FLAGS_start_bitrate); }
DEFINE_int32(max_bitrate, 800, "Maximum video bitrate.");
size_t MaxBitrate() { return static_cast<size_t>(FLAGS_max_bitrate); }
+
+DEFINE_string(codec, "VP8", "Video codec to use.");
+std::string Codec() { return static_cast<std::string>(FLAGS_codec); }
} // namespace flags
static const uint32_t kSendSsrc = 0x654321;
@@ -68,14 +71,20 @@ void Loopback() {
// Loopback, call sends to itself.
transport.SetReceiver(call->Receiver());
- VideoSendStream::Config send_config = call->GetDefaultSendConfig();
+ VideoSendStream::Config send_config;
send_config.rtp.ssrcs.push_back(kSendSsrc);
send_config.local_renderer = local_preview.get();
-
- scoped_ptr<VP8Encoder> encoder(VP8Encoder::Create());
+ scoped_ptr<VideoEncoder> encoder;
+ if (flags::Codec() == "VP8") {
+ encoder.reset(VP8Encoder::Create());
+ } else {
+ // Codec not supported.
+ assert(false && "Codec not supported!");
+ return;
+ }
send_config.encoder_settings.encoder = encoder.get();
- send_config.encoder_settings.payload_name = "VP8";
+ send_config.encoder_settings.payload_name = flags::Codec();
send_config.encoder_settings.payload_type = 124;
std::vector<VideoStream> video_streams = test::CreateVideoStreams(1);
VideoStream* stream = &video_streams[0];
@@ -99,7 +108,7 @@ void Loopback() {
flags::Fps(),
test_clock));
- VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig();
+ VideoReceiveStream::Config receive_config;
receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0];
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
receive_config.renderer = loopback_video.get();
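The new --codec flag currently accepts only "VP8"; anything else hits the assert. A hedged sketch of how the switch above could grow if another encoder were wired in (the VP9 branch and its factory are purely illustrative and not part of this change):

    scoped_ptr<VideoEncoder> encoder;
    if (flags::Codec() == "VP8") {
      encoder.reset(VP8Encoder::Create());
    } else if (flags::Codec() == "VP9") {   // hypothetical additional codec
      encoder.reset(VP9Encoder::Create());  // assumes such a factory exists
    } else {
      assert(false && "Codec not supported!");
      return;
    }
    send_config.encoder_settings.encoder = encoder.get();
    send_config.encoder_settings.payload_name = flags::Codec();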
diff --git a/video/rampup_tests.cc b/video/rampup_tests.cc
index 5529f92e..b6f63814 100644
--- a/video/rampup_tests.cc
+++ b/video/rampup_tests.cc
@@ -7,549 +7,496 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <assert.h>
-
-#include <map>
-#include <string>
-#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/call.h"
-#include "webrtc/common.h"
-#include "webrtc/experiments.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/test/call_test.h"
-#include "webrtc/test/direct_transport.h"
-#include "webrtc/test/encoder_settings.h"
-#include "webrtc/test/fake_decoder.h"
-#include "webrtc/test/fake_encoder.h"
-#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/testsupport/perf_test.h"
-#include "webrtc/video/transport_adapter.h"
+#include "webrtc/video/rampup_tests.h"
namespace webrtc {
-
namespace {
-static const int kTransmissionTimeOffsetExtensionId = 6;
+
static const int kMaxPacketSize = 1500;
-static const unsigned int kSingleStreamTargetBps = 1000000;
-
-class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
- public:
- typedef std::map<uint32_t, int> BytesSentMap;
- typedef std::map<uint32_t, uint32_t> SsrcMap;
- StreamObserver(const SsrcMap& rtx_media_ssrcs,
- newapi::Transport* feedback_transport,
- Clock* clock)
- : clock_(clock),
- test_done_(EventWrapper::Create()),
- rtp_parser_(RtpHeaderParser::Create()),
- feedback_transport_(feedback_transport),
- receive_stats_(ReceiveStatistics::Create(clock)),
- payload_registry_(
- new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
- expected_bitrate_bps_(0),
- start_bitrate_bps_(0),
- rtx_media_ssrcs_(rtx_media_ssrcs),
- total_sent_(0),
- padding_sent_(0),
- rtx_media_sent_(0),
- total_packets_sent_(0),
- padding_packets_sent_(0),
- rtx_media_packets_sent_(0) {
- // Ideally we would only have to instantiate an RtcpSender, an
- // RtpHeaderParser and a RemoteBitrateEstimator here, but due to the current
- // state of the RTP module we need a full module and receive statistics to
- // be able to produce an RTCP with REMB.
- RtpRtcp::Configuration config;
- config.receive_statistics = receive_stats_.get();
- feedback_transport_.Enable();
- config.outgoing_transport = &feedback_transport_;
- rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
- rtp_rtcp_->SetREMBStatus(true);
- rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
- rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
- kTransmissionTimeOffsetExtensionId);
- AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory;
- const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000;
- remote_bitrate_estimator_.reset(
- rbe_factory.Create(this, clock, kMimdControl,
- kRemoteBitrateEstimatorMinBitrateBps));
- }
- void set_expected_bitrate_bps(unsigned int expected_bitrate_bps) {
- CriticalSectionScoped lock(crit_.get());
- expected_bitrate_bps_ = expected_bitrate_bps;
- }
+std::vector<uint32_t> GenerateSsrcs(size_t num_streams,
+ uint32_t ssrc_offset) {
+ std::vector<uint32_t> ssrcs;
+ for (size_t i = 0; i != num_streams; ++i)
+ ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
+ return ssrcs;
+}
+} // namespace
- void set_start_bitrate_bps(unsigned int start_bitrate_bps) {
- CriticalSectionScoped lock(crit_.get());
- start_bitrate_bps_ = start_bitrate_bps;
- }
+StreamObserver::StreamObserver(const SsrcMap& rtx_media_ssrcs,
+ newapi::Transport* feedback_transport,
+ Clock* clock,
+ RemoteBitrateEstimatorFactory* rbe_factory,
+ RateControlType control_type)
+ : clock_(clock),
+ test_done_(EventWrapper::Create()),
+ rtp_parser_(RtpHeaderParser::Create()),
+ feedback_transport_(feedback_transport),
+ receive_stats_(ReceiveStatistics::Create(clock)),
+ payload_registry_(
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ expected_bitrate_bps_(0),
+ start_bitrate_bps_(0),
+ rtx_media_ssrcs_(rtx_media_ssrcs),
+ total_sent_(0),
+ padding_sent_(0),
+ rtx_media_sent_(0),
+ total_packets_sent_(0),
+ padding_packets_sent_(0),
+ rtx_media_packets_sent_(0),
+ test_start_ms_(clock_->TimeInMilliseconds()),
+ ramp_up_finished_ms_(0) {
+ // Ideally we would only have to instantiate an RtcpSender, an
+ // RtpHeaderParser and a RemoteBitrateEstimator here, but due to the current
+ // state of the RTP module we need a full module and receive statistics to
+ // be able to produce an RTCP with REMB.
+ RtpRtcp::Configuration config;
+ config.receive_statistics = receive_stats_.get();
+ feedback_transport_.Enable();
+ config.outgoing_transport = &feedback_transport_;
+ rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
+ rtp_rtcp_->SetREMBStatus(true);
+ rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
+ rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsSendTimeExtensionId);
+ rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId);
+ const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000;
+ remote_bitrate_estimator_.reset(
+ rbe_factory->Create(this, clock, control_type,
+ kRemoteBitrateEstimatorMinBitrateBps));
+}
- virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
- unsigned int bitrate) OVERRIDE {
- CriticalSectionScoped lock(crit_.get());
- assert(expected_bitrate_bps_ > 0);
- if (start_bitrate_bps_ != 0) {
- // For tests with an explicitly set start bitrate, verify the first
- // bitrate estimate is close to the start bitrate and lower than the
- // test target bitrate. This is to verify a call respects the configured
- // start bitrate, but due to the BWE implementation we can't guarantee the
- // first estimate really is as high as the start bitrate.
- EXPECT_GT(bitrate, 0.9 * start_bitrate_bps_);
- EXPECT_LT(bitrate, expected_bitrate_bps_);
- start_bitrate_bps_ = 0;
- }
- if (bitrate >= expected_bitrate_bps_) {
- // Just trigger if there was any rtx padding packet.
- if (rtx_media_ssrcs_.empty() || rtx_media_sent_ > 0) {
- TriggerTestDone();
- }
- }
- rtp_rtcp_->SetREMBData(
- bitrate, static_cast<uint8_t>(ssrcs.size()), &ssrcs[0]);
- rtp_rtcp_->Process();
- }
+void StreamObserver::set_expected_bitrate_bps(
+ unsigned int expected_bitrate_bps) {
+ CriticalSectionScoped lock(crit_.get());
+ expected_bitrate_bps_ = expected_bitrate_bps;
+}
- virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- CriticalSectionScoped lock(crit_.get());
- RTPHeader header;
- EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
- receive_stats_->IncomingPacket(header, length, false);
- payload_registry_->SetIncomingPayloadType(header);
- remote_bitrate_estimator_->IncomingPacket(
- clock_->TimeInMilliseconds(), static_cast<int>(length - 12), header);
- if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) {
- remote_bitrate_estimator_->Process();
- }
- total_sent_ += length;
- padding_sent_ += header.paddingLength;
- ++total_packets_sent_;
- if (header.paddingLength > 0)
- ++padding_packets_sent_;
- if (rtx_media_ssrcs_.find(header.ssrc) != rtx_media_ssrcs_.end()) {
- rtx_media_sent_ += length - header.headerLength - header.paddingLength;
- if (header.paddingLength == 0)
- ++rtx_media_packets_sent_;
- uint8_t restored_packet[kMaxPacketSize];
- uint8_t* restored_packet_ptr = restored_packet;
- int restored_length = static_cast<int>(length);
- payload_registry_->RestoreOriginalPacket(&restored_packet_ptr,
- packet,
- &restored_length,
- rtx_media_ssrcs_[header.ssrc],
- header);
- length = restored_length;
- EXPECT_TRUE(rtp_parser_->Parse(
- restored_packet, static_cast<int>(length), &header));
- } else {
- rtp_rtcp_->SetRemoteSSRC(header.ssrc);
+void StreamObserver::set_start_bitrate_bps(unsigned int start_bitrate_bps) {
+ CriticalSectionScoped lock(crit_.get());
+ start_bitrate_bps_ = start_bitrate_bps;
+}
+
+void StreamObserver::OnReceiveBitrateChanged(
+ const std::vector<unsigned int>& ssrcs, unsigned int bitrate) {
+ CriticalSectionScoped lock(crit_.get());
+ assert(expected_bitrate_bps_ > 0);
+ if (start_bitrate_bps_ != 0) {
+ // For tests with an explicitly set start bitrate, verify the first
+ // bitrate estimate is close to the start bitrate and lower than the
+ // test target bitrate. This is to verify a call respects the configured
+ // start bitrate, but due to the BWE implementation we can't guarantee the
+ // first estimate really is as high as the start bitrate.
+ EXPECT_GT(bitrate, 0.9 * start_bitrate_bps_);
+ EXPECT_LT(bitrate, expected_bitrate_bps_);
+ start_bitrate_bps_ = 0;
+ }
+ if (bitrate >= expected_bitrate_bps_) {
+ ramp_up_finished_ms_ = clock_->TimeInMilliseconds();
+ // Just trigger if there was any rtx padding packet.
+ if (rtx_media_ssrcs_.empty() || rtx_media_sent_ > 0) {
+ TriggerTestDone();
}
- return true;
}
+ rtp_rtcp_->SetREMBData(
+ bitrate, static_cast<uint8_t>(ssrcs.size()), &ssrcs[0]);
+ rtp_rtcp_->Process();
+}
- virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
- return true;
+bool StreamObserver::SendRtp(const uint8_t* packet, size_t length) {
+ CriticalSectionScoped lock(crit_.get());
+ RTPHeader header;
+ EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
+ receive_stats_->IncomingPacket(header, length, false);
+ payload_registry_->SetIncomingPayloadType(header);
+ remote_bitrate_estimator_->IncomingPacket(
+ clock_->TimeInMilliseconds(), static_cast<int>(length - 12), header);
+ if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) {
+ remote_bitrate_estimator_->Process();
+ }
+ total_sent_ += length;
+ padding_sent_ += header.paddingLength;
+ ++total_packets_sent_;
+ if (header.paddingLength > 0)
+ ++padding_packets_sent_;
+ if (rtx_media_ssrcs_.find(header.ssrc) != rtx_media_ssrcs_.end()) {
+ rtx_media_sent_ += length - header.headerLength - header.paddingLength;
+ if (header.paddingLength == 0)
+ ++rtx_media_packets_sent_;
+ uint8_t restored_packet[kMaxPacketSize];
+ uint8_t* restored_packet_ptr = restored_packet;
+ int restored_length = static_cast<int>(length);
+ payload_registry_->RestoreOriginalPacket(&restored_packet_ptr,
+ packet,
+ &restored_length,
+ rtx_media_ssrcs_[header.ssrc],
+ header);
+ length = restored_length;
+ EXPECT_TRUE(rtp_parser_->Parse(
+ restored_packet, static_cast<int>(length), &header));
+ } else {
+ rtp_rtcp_->SetRemoteSSRC(header.ssrc);
}
+ return true;
+}
- EventTypeWrapper Wait() { return test_done_->Wait(120 * 1000); }
+bool StreamObserver::SendRtcp(const uint8_t* packet, size_t length) {
+ return true;
+}
- private:
- void ReportResult(const std::string& measurement,
- size_t value,
- const std::string& units) {
- webrtc::test::PrintResult(
- measurement, "",
- ::testing::UnitTest::GetInstance()->current_test_info()->name(),
- value, units, false);
- }
+EventTypeWrapper StreamObserver::Wait() { return test_done_->Wait(120 * 1000); }
- void TriggerTestDone() EXCLUSIVE_LOCKS_REQUIRED(crit_) {
- ReportResult("total-sent", total_sent_, "bytes");
- ReportResult("padding-sent", padding_sent_, "bytes");
- ReportResult("rtx-media-sent", rtx_media_sent_, "bytes");
- ReportResult("total-packets-sent", total_packets_sent_, "packets");
- ReportResult("padding-packets-sent", padding_packets_sent_, "packets");
- ReportResult("rtx-packets-sent", rtx_media_packets_sent_, "packets");
- test_done_->Set();
- }
+void StreamObserver::ReportResult(const std::string& measurement,
+ size_t value,
+ const std::string& units) {
+ webrtc::test::PrintResult(
+ measurement, "",
+ ::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ value, units, false);
+}
- Clock* const clock_;
- const scoped_ptr<EventWrapper> test_done_;
- const scoped_ptr<RtpHeaderParser> rtp_parser_;
- scoped_ptr<RtpRtcp> rtp_rtcp_;
- internal::TransportAdapter feedback_transport_;
- const scoped_ptr<ReceiveStatistics> receive_stats_;
- const scoped_ptr<RTPPayloadRegistry> payload_registry_;
- scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
-
- const scoped_ptr<CriticalSectionWrapper> crit_;
- unsigned int expected_bitrate_bps_ GUARDED_BY(crit_);
- unsigned int start_bitrate_bps_ GUARDED_BY(crit_);
- SsrcMap rtx_media_ssrcs_ GUARDED_BY(crit_);
- size_t total_sent_ GUARDED_BY(crit_);
- size_t padding_sent_ GUARDED_BY(crit_);
- size_t rtx_media_sent_ GUARDED_BY(crit_);
- int total_packets_sent_ GUARDED_BY(crit_);
- int padding_packets_sent_ GUARDED_BY(crit_);
- int rtx_media_packets_sent_ GUARDED_BY(crit_);
-};
-
-class LowRateStreamObserver : public test::DirectTransport,
- public RemoteBitrateObserver,
- public PacketReceiver {
- public:
- LowRateStreamObserver(newapi::Transport* feedback_transport,
- Clock* clock,
- size_t number_of_streams,
- bool rtx_used)
- : clock_(clock),
- number_of_streams_(number_of_streams),
- rtx_used_(rtx_used),
- test_done_(EventWrapper::Create()),
- rtp_parser_(RtpHeaderParser::Create()),
- feedback_transport_(feedback_transport),
- receive_stats_(ReceiveStatistics::Create(clock)),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
- send_stream_(NULL),
- test_state_(kFirstRampup),
- state_start_ms_(clock_->TimeInMilliseconds()),
- interval_start_ms_(state_start_ms_),
- last_remb_bps_(0),
- sent_bytes_(0),
- total_overuse_bytes_(0),
- suspended_in_stats_(false) {
- RtpRtcp::Configuration config;
- config.receive_statistics = receive_stats_.get();
- feedback_transport_.Enable();
- config.outgoing_transport = &feedback_transport_;
- rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
- rtp_rtcp_->SetREMBStatus(true);
- rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
- rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
- kTransmissionTimeOffsetExtensionId);
- AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory;
- const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 10000;
- remote_bitrate_estimator_.reset(
- rbe_factory.Create(this, clock, kMimdControl,
- kRemoteBitrateEstimatorMinBitrateBps));
- forward_transport_config_.link_capacity_kbps =
- kHighBandwidthLimitBps / 1000;
- forward_transport_config_.queue_length = 100; // Something large.
- test::DirectTransport::SetConfig(forward_transport_config_);
- test::DirectTransport::SetReceiver(this);
- }
+void StreamObserver::TriggerTestDone() EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ ReportResult("ramp-up-total-sent", total_sent_, "bytes");
+ ReportResult("ramp-up-padding-sent", padding_sent_, "bytes");
+ ReportResult("ramp-up-rtx-media-sent", rtx_media_sent_, "bytes");
+ ReportResult("ramp-up-total-packets-sent", total_packets_sent_, "packets");
+ ReportResult("ramp-up-padding-packets-sent",
+ padding_packets_sent_,
+ "packets");
+ ReportResult("ramp-up-rtx-packets-sent",
+ rtx_media_packets_sent_,
+ "packets");
+ ReportResult("ramp-up-time",
+ ramp_up_finished_ms_ - test_start_ms_,
+ "milliseconds");
+ test_done_->Set();
+}
- virtual void SetSendStream(const VideoSendStream* send_stream) {
- CriticalSectionScoped lock(crit_.get());
- send_stream_ = send_stream;
- }
+LowRateStreamObserver::LowRateStreamObserver(
+ newapi::Transport* feedback_transport,
+ Clock* clock,
+ size_t number_of_streams,
+ bool rtx_used)
+ : clock_(clock),
+ number_of_streams_(number_of_streams),
+ rtx_used_(rtx_used),
+ test_done_(EventWrapper::Create()),
+ rtp_parser_(RtpHeaderParser::Create()),
+ feedback_transport_(feedback_transport),
+ receive_stats_(ReceiveStatistics::Create(clock)),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ send_stream_(NULL),
+ test_state_(kFirstRampup),
+ state_start_ms_(clock_->TimeInMilliseconds()),
+ interval_start_ms_(state_start_ms_),
+ last_remb_bps_(0),
+ sent_bytes_(0),
+ total_overuse_bytes_(0),
+ suspended_in_stats_(false) {
+ RtpRtcp::Configuration config;
+ config.receive_statistics = receive_stats_.get();
+ feedback_transport_.Enable();
+ config.outgoing_transport = &feedback_transport_;
+ rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
+ rtp_rtcp_->SetREMBStatus(true);
+ rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
+ rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId);
+ AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory;
+ const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 10000;
+ remote_bitrate_estimator_.reset(
+ rbe_factory.Create(this, clock, kMimdControl,
+ kRemoteBitrateEstimatorMinBitrateBps));
+ forward_transport_config_.link_capacity_kbps =
+ kHighBandwidthLimitBps / 1000;
+ forward_transport_config_.queue_length = 100; // Something large.
+ test::DirectTransport::SetConfig(forward_transport_config_);
+ test::DirectTransport::SetReceiver(this);
+}
- virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
- unsigned int bitrate) {
- CriticalSectionScoped lock(crit_.get());
- rtp_rtcp_->SetREMBData(
- bitrate, static_cast<uint8_t>(ssrcs.size()), &ssrcs[0]);
- rtp_rtcp_->Process();
- last_remb_bps_ = bitrate;
- }
+void LowRateStreamObserver::SetSendStream(const VideoSendStream* send_stream) {
+ CriticalSectionScoped lock(crit_.get());
+ send_stream_ = send_stream;
+}
- virtual bool SendRtp(const uint8_t* data, size_t length) OVERRIDE {
- CriticalSectionScoped lock(crit_.get());
- sent_bytes_ += length;
- int64_t now_ms = clock_->TimeInMilliseconds();
- if (now_ms > interval_start_ms_ + 1000) { // Let at least 1 second pass.
- // Verify that the send rate was about right.
- unsigned int average_rate_bps = static_cast<unsigned int>(sent_bytes_) *
- 8 * 1000 / (now_ms - interval_start_ms_);
- // TODO(holmer): Why is this failing?
- // EXPECT_LT(average_rate_bps, last_remb_bps_ * 1.1);
- if (average_rate_bps > last_remb_bps_ * 1.1) {
- total_overuse_bytes_ +=
- sent_bytes_ -
- last_remb_bps_ / 8 * (now_ms - interval_start_ms_) / 1000;
- }
- EvolveTestState(average_rate_bps);
- interval_start_ms_ = now_ms;
- sent_bytes_ = 0;
- }
- return test::DirectTransport::SendRtp(data, length);
- }
+void LowRateStreamObserver::OnReceiveBitrateChanged(
+ const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ CriticalSectionScoped lock(crit_.get());
+ rtp_rtcp_->SetREMBData(
+ bitrate, static_cast<uint8_t>(ssrcs.size()), &ssrcs[0]);
+ rtp_rtcp_->Process();
+ last_remb_bps_ = bitrate;
+}
- virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
- size_t length) OVERRIDE {
- CriticalSectionScoped lock(crit_.get());
- RTPHeader header;
- EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
- receive_stats_->IncomingPacket(header, length, false);
- remote_bitrate_estimator_->IncomingPacket(
- clock_->TimeInMilliseconds(), static_cast<int>(length - 12), header);
- if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) {
- remote_bitrate_estimator_->Process();
+bool LowRateStreamObserver::SendRtp(const uint8_t* data, size_t length) {
+ CriticalSectionScoped lock(crit_.get());
+ sent_bytes_ += length;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (now_ms > interval_start_ms_ + 1000) { // Let at least 1 second pass.
+ // Verify that the send rate was about right.
+ unsigned int average_rate_bps = static_cast<unsigned int>(sent_bytes_) *
+ 8 * 1000 / (now_ms - interval_start_ms_);
+ // TODO(holmer): Why is this failing?
+ // EXPECT_LT(average_rate_bps, last_remb_bps_ * 1.1);
+ if (average_rate_bps > last_remb_bps_ * 1.1) {
+ total_overuse_bytes_ +=
+ sent_bytes_ -
+ last_remb_bps_ / 8 * (now_ms - interval_start_ms_) / 1000;
}
- suspended_in_stats_ = send_stream_->GetStats().suspended;
- return DELIVERY_OK;
+ EvolveTestState(average_rate_bps);
+ interval_start_ms_ = now_ms;
+ sent_bytes_ = 0;
}
+ return test::DirectTransport::SendRtp(data, length);
+}
- virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
- return true;
+PacketReceiver::DeliveryStatus LowRateStreamObserver::DeliverPacket(
+ const uint8_t* packet, size_t length) {
+ CriticalSectionScoped lock(crit_.get());
+ RTPHeader header;
+ EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
+ receive_stats_->IncomingPacket(header, length, false);
+ remote_bitrate_estimator_->IncomingPacket(
+ clock_->TimeInMilliseconds(), static_cast<int>(length - 12), header);
+ if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) {
+ remote_bitrate_estimator_->Process();
}
+ suspended_in_stats_ = send_stream_->GetStats().suspended;
+ return DELIVERY_OK;
+}
- // Produces a string similar to "1stream_nortx", depending on the values of
- // number_of_streams_ and rtx_used_;
- std::string GetModifierString() {
- std::string str("_");
- char temp_str[5];
- sprintf(temp_str, "%i", static_cast<int>(number_of_streams_));
- str += std::string(temp_str);
- str += "stream";
- str += (number_of_streams_ > 1 ? "s" : "");
- str += "_";
- str += (rtx_used_ ? "" : "no");
- str += "rtx";
- return str;
- }
+bool LowRateStreamObserver::SendRtcp(const uint8_t* packet, size_t length) {
+ return true;
+}
- // This method defines the state machine for the ramp up-down-up test.
- void EvolveTestState(unsigned int bitrate_bps) {
- int64_t now = clock_->TimeInMilliseconds();
- CriticalSectionScoped lock(crit_.get());
- assert(send_stream_ != NULL);
- switch (test_state_) {
- case kFirstRampup: {
- EXPECT_FALSE(suspended_in_stats_);
- if (bitrate_bps > kExpectedHighBitrateBps) {
- // The first ramp-up has reached the target bitrate. Change the
- // channel limit, and move to the next test state.
- forward_transport_config_.link_capacity_kbps =
- kLowBandwidthLimitBps / 1000;
- test::DirectTransport::SetConfig(forward_transport_config_);
- test_state_ = kLowRate;
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "first_rampup",
- now - state_start_ms_,
- "ms",
- false);
- state_start_ms_ = now;
- interval_start_ms_ = now;
- sent_bytes_ = 0;
- }
- break;
+std::string LowRateStreamObserver::GetModifierString() {
+ std::string str("_");
+ char temp_str[5];
+ sprintf(temp_str, "%i",
+ static_cast<int>(number_of_streams_));
+ str += std::string(temp_str);
+ str += "stream";
+ str += (number_of_streams_ > 1 ? "s" : "");
+ str += "_";
+ str += (rtx_used_ ? "" : "no");
+ str += "rtx";
+ return str;
+}
+
+void LowRateStreamObserver::EvolveTestState(unsigned int bitrate_bps) {
+ int64_t now = clock_->TimeInMilliseconds();
+ CriticalSectionScoped lock(crit_.get());
+ assert(send_stream_ != NULL);
+ switch (test_state_) {
+ case kFirstRampup: {
+ EXPECT_FALSE(suspended_in_stats_);
+ if (bitrate_bps > kExpectedHighBitrateBps) {
+ // The first ramp-up has reached the target bitrate. Change the
+ // channel limit, and move to the next test state.
+ forward_transport_config_.link_capacity_kbps =
+ kLowBandwidthLimitBps / 1000;
+ test::DirectTransport::SetConfig(forward_transport_config_);
+ test_state_ = kLowRate;
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "first_rampup",
+ now - state_start_ms_,
+ "ms",
+ false);
+ state_start_ms_ = now;
+ interval_start_ms_ = now;
+ sent_bytes_ = 0;
}
- case kLowRate: {
- if (bitrate_bps < kExpectedLowBitrateBps && suspended_in_stats_) {
- // The ramp-down was successful. Change the channel limit back to a
- // high value, and move to the next test state.
- forward_transport_config_.link_capacity_kbps =
- kHighBandwidthLimitBps / 1000;
- test::DirectTransport::SetConfig(forward_transport_config_);
- test_state_ = kSecondRampup;
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "rampdown",
- now - state_start_ms_,
- "ms",
- false);
- state_start_ms_ = now;
- interval_start_ms_ = now;
- sent_bytes_ = 0;
- }
- break;
+ break;
+ }
+ case kLowRate: {
+ if (bitrate_bps < kExpectedLowBitrateBps && suspended_in_stats_) {
+ // The ramp-down was successful. Change the channel limit back to a
+ // high value, and move to the next test state.
+ forward_transport_config_.link_capacity_kbps =
+ kHighBandwidthLimitBps / 1000;
+ test::DirectTransport::SetConfig(forward_transport_config_);
+ test_state_ = kSecondRampup;
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "rampdown",
+ now - state_start_ms_,
+ "ms",
+ false);
+ state_start_ms_ = now;
+ interval_start_ms_ = now;
+ sent_bytes_ = 0;
}
- case kSecondRampup: {
- if (bitrate_bps > kExpectedHighBitrateBps && !suspended_in_stats_) {
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "second_rampup",
- now - state_start_ms_,
- "ms",
- false);
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "total_overuse",
- total_overuse_bytes_,
- "bytes",
- false);
- test_done_->Set();
- }
- break;
+ break;
+ }
+ case kSecondRampup: {
+ if (bitrate_bps > kExpectedHighBitrateBps && !suspended_in_stats_) {
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "second_rampup",
+ now - state_start_ms_,
+ "ms",
+ false);
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "total_overuse",
+ total_overuse_bytes_,
+ "bytes",
+ false);
+ test_done_->Set();
}
+ break;
}
}
+}
- EventTypeWrapper Wait() {
- return test_done_->Wait(test::CallTest::kLongTimeoutMs);
- }
-
- private:
- static const unsigned int kHighBandwidthLimitBps = 80000;
- static const unsigned int kExpectedHighBitrateBps = 60000;
- static const unsigned int kLowBandwidthLimitBps = 20000;
- static const unsigned int kExpectedLowBitrateBps = 20000;
- enum TestStates { kFirstRampup, kLowRate, kSecondRampup };
-
- Clock* const clock_;
- const size_t number_of_streams_;
- const bool rtx_used_;
- const scoped_ptr<EventWrapper> test_done_;
- const scoped_ptr<RtpHeaderParser> rtp_parser_;
- scoped_ptr<RtpRtcp> rtp_rtcp_;
- internal::TransportAdapter feedback_transport_;
- const scoped_ptr<ReceiveStatistics> receive_stats_;
- scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
-
- scoped_ptr<CriticalSectionWrapper> crit_;
- const VideoSendStream* send_stream_ GUARDED_BY(crit_);
- FakeNetworkPipe::Config forward_transport_config_ GUARDED_BY(crit_);
- TestStates test_state_ GUARDED_BY(crit_);
- int64_t state_start_ms_ GUARDED_BY(crit_);
- int64_t interval_start_ms_ GUARDED_BY(crit_);
- unsigned int last_remb_bps_ GUARDED_BY(crit_);
- size_t sent_bytes_ GUARDED_BY(crit_);
- size_t total_overuse_bytes_ GUARDED_BY(crit_);
- bool suspended_in_stats_ GUARDED_BY(crit_);
-};
-} // namespace
+EventTypeWrapper LowRateStreamObserver::Wait() {
+ return test_done_->Wait(test::CallTest::kLongTimeoutMs);
+}
-class RampUpTest : public test::CallTest {
- protected:
- void RunRampUpTest(bool rtx,
- size_t num_streams,
- unsigned int start_bitrate_bps) {
- std::vector<uint32_t> ssrcs(GenerateSsrcs(num_streams, 100));
- std::vector<uint32_t> rtx_ssrcs(GenerateSsrcs(num_streams, 200));
- StreamObserver::SsrcMap rtx_ssrc_map;
- if (rtx) {
- for (size_t i = 0; i < ssrcs.size(); ++i)
- rtx_ssrc_map[rtx_ssrcs[i]] = ssrcs[i];
- }
- test::DirectTransport receiver_transport;
- StreamObserver stream_observer(rtx_ssrc_map,
- &receiver_transport,
- Clock::GetRealTimeClock());
-
- Call::Config call_config(&stream_observer);
- if (start_bitrate_bps != 0) {
- call_config.start_bitrate_bps = start_bitrate_bps;
- stream_observer.set_start_bitrate_bps(start_bitrate_bps);
- }
+void RampUpTest::RunRampUpTest(bool rtx,
+ size_t num_streams,
+ unsigned int start_bitrate_bps,
+ const std::string& extension_type) {
+ std::vector<uint32_t> ssrcs(GenerateSsrcs(num_streams, 100));
+ std::vector<uint32_t> rtx_ssrcs(GenerateSsrcs(num_streams, 200));
+ StreamObserver::SsrcMap rtx_ssrc_map;
+ if (rtx) {
+ for (size_t i = 0; i < ssrcs.size(); ++i)
+ rtx_ssrc_map[rtx_ssrcs[i]] = ssrcs[i];
+ }
- CreateSenderCall(call_config);
- CreateSendConfig(num_streams);
+ CreateSendConfig(num_streams);
- receiver_transport.SetReceiver(sender_call_->Receiver());
+ scoped_ptr<RemoteBitrateEstimatorFactory> rbe_factory;
+ RateControlType control_type;
+ if (extension_type == RtpExtension::kAbsSendTime) {
+ control_type = kAimdControl;
+ rbe_factory.reset(new AbsoluteSendTimeRemoteBitrateEstimatorFactory);
+ send_config_.rtp.extensions.push_back(RtpExtension(
+ extension_type.c_str(), kAbsSendTimeExtensionId));
+ } else {
+ control_type = kMimdControl;
+ rbe_factory.reset(new RemoteBitrateEstimatorFactory);
+ send_config_.rtp.extensions.push_back(RtpExtension(
+ extension_type.c_str(), kTransmissionTimeOffsetExtensionId));
+ }
- if (num_streams == 1) {
- video_streams_[0].target_bitrate_bps = 2000000;
- video_streams_[0].max_bitrate_bps = 2000000;
- }
+ test::DirectTransport receiver_transport;
+ StreamObserver stream_observer(rtx_ssrc_map,
+ &receiver_transport,
+ Clock::GetRealTimeClock(),
+ rbe_factory.get(),
+ control_type);
+
+ Call::Config call_config(&stream_observer);
+ if (start_bitrate_bps != 0) {
+ call_config.start_bitrate_bps = start_bitrate_bps;
+ stream_observer.set_start_bitrate_bps(start_bitrate_bps);
+ }
- send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- send_config_.rtp.ssrcs = ssrcs;
- if (rtx) {
- send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
- send_config_.rtp.rtx.ssrcs = rtx_ssrcs;
- send_config_.rtp.rtx.pad_with_redundant_payloads = true;
- }
- send_config_.rtp.extensions.push_back(RtpExtension(
- RtpExtension::kTOffset, kTransmissionTimeOffsetExtensionId));
-
- if (num_streams == 1) {
- // For single stream rampup until 1mbps
- stream_observer.set_expected_bitrate_bps(kSingleStreamTargetBps);
- } else {
- // For multi stream rampup until all streams are being sent. That means
- // enough birate to send all the target streams plus the min bitrate of
- // the last one.
- int expected_bitrate_bps = video_streams_.back().min_bitrate_bps;
- for (size_t i = 0; i < video_streams_.size() - 1; ++i) {
- expected_bitrate_bps += video_streams_[i].target_bitrate_bps;
- }
- stream_observer.set_expected_bitrate_bps(expected_bitrate_bps);
- }
+ CreateSenderCall(call_config);
- CreateStreams();
- CreateFrameGeneratorCapturer();
+ receiver_transport.SetReceiver(sender_call_->Receiver());
- Start();
+ if (num_streams == 1) {
+ video_streams_[0].target_bitrate_bps = 2000000;
+ video_streams_[0].max_bitrate_bps = 2000000;
+ }
- EXPECT_EQ(kEventSignaled, stream_observer.Wait());
+ send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ send_config_.rtp.ssrcs = ssrcs;
+ if (rtx) {
+ send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ send_config_.rtp.rtx.ssrcs = rtx_ssrcs;
+ send_config_.rtp.rtx.pad_with_redundant_payloads = true;
+ }
- Stop();
- DestroyStreams();
+ if (num_streams == 1) {
+    // For a single stream, ramp up until 1 Mbps.
+ stream_observer.set_expected_bitrate_bps(kSingleStreamTargetBps);
+ } else {
+    // For multiple streams, ramp up until all streams are being sent. That
+    // means enough bitrate to send all the target streams plus the min
+    // bitrate of the last one.
+ int expected_bitrate_bps = video_streams_.back().min_bitrate_bps;
+ for (size_t i = 0; i < video_streams_.size() - 1; ++i) {
+ expected_bitrate_bps += video_streams_[i].target_bitrate_bps;
+ }
+ stream_observer.set_expected_bitrate_bps(expected_bitrate_bps);
}
- void RunRampUpDownUpTest(size_t number_of_streams, bool rtx) {
- std::vector<uint32_t> ssrcs;
- for (size_t i = 0; i < number_of_streams; ++i)
- ssrcs.push_back(static_cast<uint32_t>(i + 1));
- test::DirectTransport receiver_transport;
- LowRateStreamObserver stream_observer(
- &receiver_transport, Clock::GetRealTimeClock(), number_of_streams, rtx);
+ CreateStreams();
+ CreateFrameGeneratorCapturer();
- Call::Config call_config(&stream_observer);
- webrtc::Config webrtc_config;
- call_config.webrtc_config = &webrtc_config;
- webrtc_config.Set<PaddingStrategy>(new PaddingStrategy(rtx));
- CreateSenderCall(call_config);
- receiver_transport.SetReceiver(sender_call_->Receiver());
+ Start();
- CreateSendConfig(number_of_streams);
+ EXPECT_EQ(kEventSignaled, stream_observer.Wait());
- send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- send_config_.rtp.extensions.push_back(RtpExtension(
- RtpExtension::kTOffset, kTransmissionTimeOffsetExtensionId));
- send_config_.suspend_below_min_bitrate = true;
+ Stop();
+ DestroyStreams();
+}
- CreateStreams();
- stream_observer.SetSendStream(send_stream_);
+void RampUpTest::RunRampUpDownUpTest(size_t number_of_streams, bool rtx) {
+ test::DirectTransport receiver_transport;
+ LowRateStreamObserver stream_observer(
+ &receiver_transport, Clock::GetRealTimeClock(), number_of_streams, rtx);
+
+ Call::Config call_config(&stream_observer);
+ CreateSenderCall(call_config);
+ receiver_transport.SetReceiver(sender_call_->Receiver());
+
+ CreateSendConfig(number_of_streams);
+
+ send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ send_config_.rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kTOffset, kTransmissionTimeOffsetExtensionId));
+ send_config_.suspend_below_min_bitrate = true;
+ if (rtx) {
+ send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ send_config_.rtp.rtx.ssrcs = GenerateSsrcs(number_of_streams, 200);
+ send_config_.rtp.rtx.pad_with_redundant_payloads = true;
+ }
- CreateFrameGeneratorCapturer();
+ CreateStreams();
+ stream_observer.SetSendStream(send_stream_);
- Start();
+ CreateFrameGeneratorCapturer();
- EXPECT_EQ(kEventSignaled, stream_observer.Wait());
+ Start();
- Stop();
- DestroyStreams();
- }
+ EXPECT_EQ(kEventSignaled, stream_observer.Wait());
- private:
- std::vector<uint32_t> GenerateSsrcs(size_t num_streams,
- uint32_t ssrc_offset) {
- std::vector<uint32_t> ssrcs;
- for (size_t i = 0; i != num_streams; ++i)
- ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
- return ssrcs;
- }
-};
+ Stop();
+ DestroyStreams();
+}
TEST_F(RampUpTest, SingleStream) {
- RunRampUpTest(false, 1, 0);
+ RunRampUpTest(false, 1, 0, RtpExtension::kTOffset);
}
TEST_F(RampUpTest, Simulcast) {
- RunRampUpTest(false, 3, 0);
+ RunRampUpTest(false, 3, 0, RtpExtension::kTOffset);
}
TEST_F(RampUpTest, SimulcastWithRtx) {
- RunRampUpTest(true, 3, 0);
+ RunRampUpTest(true, 3, 0, RtpExtension::kTOffset);
}
TEST_F(RampUpTest, SingleStreamWithHighStartBitrate) {
- RunRampUpTest(false, 1, 0.9 * kSingleStreamTargetBps);
+ RunRampUpTest(false, 1, 0.9 * kSingleStreamTargetBps, RtpExtension::kTOffset);
}
TEST_F(RampUpTest, UpDownUpOneStream) { RunRampUpDownUpTest(1, false); }
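
Note (illustrative, not part of this patch): the new extension_type argument lets a test pick the abs-send-time estimator instead of the transmission-offset one. A minimal usage sketch, assuming it would sit next to the existing tests in video/rampup_tests.cc and reusing only names introduced above; the test name itself is hypothetical:

TEST_F(RampUpTest, SingleStreamAbsSendTime) {
  RunRampUpTest(false, 1, 0, RtpExtension::kAbsSendTime);
}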
diff --git a/video/rampup_tests.h b/video/rampup_tests.h
new file mode 100644
index 00000000..69399b41
--- /dev/null
+++ b/video/rampup_tests.h
@@ -0,0 +1,159 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_RAMPUP_TESTS_H_
+#define WEBRTC_VIDEO_RAMPUP_TESTS_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/call.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/call_test.h"
+#include "webrtc/video/transport_adapter.h"
+
+namespace webrtc {
+
+static const int kTransmissionTimeOffsetExtensionId = 6;
+static const int kAbsSendTimeExtensionId = 7;
+static const unsigned int kSingleStreamTargetBps = 1000000;
+
+class Clock;
+class CriticalSectionWrapper;
+class ReceiveStatistics;
+class RtpHeaderParser;
+class RTPPayloadRegistry;
+class RtpRtcp;
+
+class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
+ public:
+ typedef std::map<uint32_t, int> BytesSentMap;
+ typedef std::map<uint32_t, uint32_t> SsrcMap;
+ StreamObserver(const SsrcMap& rtx_media_ssrcs,
+ newapi::Transport* feedback_transport,
+ Clock* clock,
+ RemoteBitrateEstimatorFactory* rbe_factory,
+ RateControlType control_type);
+
+ void set_expected_bitrate_bps(unsigned int expected_bitrate_bps);
+
+ void set_start_bitrate_bps(unsigned int start_bitrate_bps);
+
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) OVERRIDE;
+
+ virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE;
+
+ virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE;
+
+ EventTypeWrapper Wait();
+
+ private:
+ void ReportResult(const std::string& measurement,
+ size_t value,
+ const std::string& units);
+ void TriggerTestDone() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ Clock* const clock_;
+ const scoped_ptr<EventWrapper> test_done_;
+ const scoped_ptr<RtpHeaderParser> rtp_parser_;
+ scoped_ptr<RtpRtcp> rtp_rtcp_;
+ internal::TransportAdapter feedback_transport_;
+ const scoped_ptr<ReceiveStatistics> receive_stats_;
+ const scoped_ptr<RTPPayloadRegistry> payload_registry_;
+ scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ unsigned int expected_bitrate_bps_ GUARDED_BY(crit_);
+ unsigned int start_bitrate_bps_ GUARDED_BY(crit_);
+ SsrcMap rtx_media_ssrcs_ GUARDED_BY(crit_);
+ size_t total_sent_ GUARDED_BY(crit_);
+ size_t padding_sent_ GUARDED_BY(crit_);
+ size_t rtx_media_sent_ GUARDED_BY(crit_);
+ int total_packets_sent_ GUARDED_BY(crit_);
+ int padding_packets_sent_ GUARDED_BY(crit_);
+ int rtx_media_packets_sent_ GUARDED_BY(crit_);
+ int64_t test_start_ms_ GUARDED_BY(crit_);
+ int64_t ramp_up_finished_ms_ GUARDED_BY(crit_);
+};
+
+class LowRateStreamObserver : public test::DirectTransport,
+ public RemoteBitrateObserver,
+ public PacketReceiver {
+ public:
+ LowRateStreamObserver(newapi::Transport* feedback_transport,
+ Clock* clock,
+ size_t number_of_streams,
+ bool rtx_used);
+
+ virtual void SetSendStream(const VideoSendStream* send_stream);
+
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate);
+
+ virtual bool SendRtp(const uint8_t* data, size_t length) OVERRIDE;
+
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE;
+
+ virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE;
+
+  // Produces a string similar to "1stream_nortx", depending on the values of
+  // number_of_streams_ and rtx_used_.
+ std::string GetModifierString();
+
+ // This method defines the state machine for the ramp up-down-up test.
+ void EvolveTestState(unsigned int bitrate_bps);
+
+ EventTypeWrapper Wait();
+
+ private:
+ static const unsigned int kHighBandwidthLimitBps = 80000;
+ static const unsigned int kExpectedHighBitrateBps = 60000;
+ static const unsigned int kLowBandwidthLimitBps = 20000;
+ static const unsigned int kExpectedLowBitrateBps = 20000;
+ enum TestStates { kFirstRampup, kLowRate, kSecondRampup };
+
+ Clock* const clock_;
+ const size_t number_of_streams_;
+ const bool rtx_used_;
+ const scoped_ptr<EventWrapper> test_done_;
+ const scoped_ptr<RtpHeaderParser> rtp_parser_;
+ scoped_ptr<RtpRtcp> rtp_rtcp_;
+ internal::TransportAdapter feedback_transport_;
+ const scoped_ptr<ReceiveStatistics> receive_stats_;
+ scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+
+ scoped_ptr<CriticalSectionWrapper> crit_;
+ const VideoSendStream* send_stream_ GUARDED_BY(crit_);
+ FakeNetworkPipe::Config forward_transport_config_ GUARDED_BY(crit_);
+ TestStates test_state_ GUARDED_BY(crit_);
+ int64_t state_start_ms_ GUARDED_BY(crit_);
+ int64_t interval_start_ms_ GUARDED_BY(crit_);
+ unsigned int last_remb_bps_ GUARDED_BY(crit_);
+ size_t sent_bytes_ GUARDED_BY(crit_);
+ size_t total_overuse_bytes_ GUARDED_BY(crit_);
+ bool suspended_in_stats_ GUARDED_BY(crit_);
+};
+
+class RampUpTest : public test::CallTest {
+ protected:
+ void RunRampUpTest(bool rtx,
+ size_t num_streams,
+ unsigned int start_bitrate_bps,
+ const std::string& extension_type);
+
+ void RunRampUpDownUpTest(size_t number_of_streams, bool rtx);
+};
+} // namespace webrtc
+#endif // WEBRTC_VIDEO_RAMPUP_TESTS_H_
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
index 4d32ab40..ff3fe508 100644
--- a/video/video_send_stream.cc
+++ b/video/video_send_stream.cc
@@ -108,18 +108,21 @@ std::string VideoSendStream::Config::ToString() const {
}
namespace internal {
-VideoSendStream::VideoSendStream(newapi::Transport* transport,
- CpuOveruseObserver* overuse_observer,
- webrtc::VideoEngine* video_engine,
- const VideoSendStream::Config& config,
- const std::vector<VideoStream> video_streams,
- const void* encoder_settings,
- int base_channel,
- int start_bitrate_bps)
+VideoSendStream::VideoSendStream(
+ newapi::Transport* transport,
+ CpuOveruseObserver* overuse_observer,
+ webrtc::VideoEngine* video_engine,
+ const VideoSendStream::Config& config,
+ const std::vector<VideoStream> video_streams,
+ const void* encoder_settings,
+ const std::map<uint32_t, RtpState>& suspended_ssrcs,
+ int base_channel,
+ int start_bitrate_bps)
: transport_adapter_(transport),
encoded_frame_proxy_(config.post_encode_callback),
config_(config),
start_bitrate_bps_(start_bitrate_bps),
+ suspended_ssrcs_(suspended_ssrcs),
external_codec_(NULL),
channel_(-1),
stats_proxy_(new SendStatisticsProxy(config, this)) {
@@ -403,6 +406,9 @@ void VideoSendStream::ConfigureSsrcs() {
uint32_t ssrc = config_.rtp.ssrcs[i];
rtp_rtcp_->SetLocalSSRC(
channel_, ssrc, kViEStreamTypeNormal, static_cast<unsigned char>(i));
+ RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
+ if (it != suspended_ssrcs_.end())
+ rtp_rtcp_->SetRtpStateForSsrc(channel_, ssrc, it->second);
}
if (config_.rtp.rtx.ssrcs.empty()) {
@@ -412,11 +418,15 @@ void VideoSendStream::ConfigureSsrcs() {
// Set up RTX.
assert(config_.rtp.rtx.ssrcs.size() == config_.rtp.ssrcs.size());
- for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
+ for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
+ uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
rtp_rtcp_->SetLocalSSRC(channel_,
config_.rtp.rtx.ssrcs[i],
kViEStreamTypeRtx,
static_cast<unsigned char>(i));
+ RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
+ if (it != suspended_ssrcs_.end())
+ rtp_rtcp_->SetRtpStateForSsrc(channel_, ssrc, it->second);
}
if (config_.rtp.rtx.pad_with_redundant_payloads) {
@@ -427,5 +437,20 @@ void VideoSendStream::ConfigureSsrcs() {
rtp_rtcp_->SetRtxSendPayloadType(channel_, config_.rtp.rtx.payload_type);
}
+std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const {
+ std::map<uint32_t, RtpState> rtp_states;
+ for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
+ uint32_t ssrc = config_.rtp.ssrcs[i];
+ rtp_states[ssrc] = rtp_rtcp_->GetRtpStateForSsrc(channel_, ssrc);
+ }
+
+ for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
+ uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
+ rtp_states[ssrc] = rtp_rtcp_->GetRtpStateForSsrc(channel_, ssrc);
+ }
+
+ return rtp_states;
+}
+
} // namespace internal
} // namespace webrtc
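
Note (illustrative, not part of this patch): the suspended_ssrcs constructor argument and GetRtpStates() together let a caller snapshot per-SSRC RTP state from a stream before destroying it and feed that snapshot to the stream that replaces it, so sequence numbers continue rather than restart. A minimal self-contained sketch of that pattern, using stand-in types instead of the real VideoSendStream and RtpState:

#include <cstdint>
#include <iostream>
#include <map>

// Stand-in for webrtc::RtpState: the per-SSRC counters worth preserving.
struct FakeRtpState {
  uint16_t sequence_number = 0;
  uint32_t start_timestamp = 0;
};

// Stand-in for a send stream that owns per-SSRC RTP state.
class FakeSendStream {
 public:
  explicit FakeSendStream(
      const std::map<uint32_t, FakeRtpState>& suspended_ssrcs)
      : states_(suspended_ssrcs) {}

  // Pretend to send a packet on |ssrc|, consuming a sequence number.
  void SendPacket(uint32_t ssrc) { ++states_[ssrc].sequence_number; }

  // Mirrors VideoSendStream::GetRtpStates(): snapshot the state of every SSRC.
  std::map<uint32_t, FakeRtpState> GetRtpStates() const { return states_; }

 private:
  std::map<uint32_t, FakeRtpState> states_;
};

int main() {
  std::map<uint32_t, FakeRtpState> no_prior_state;
  FakeSendStream first(no_prior_state);
  first.SendPacket(0x1234);
  first.SendPacket(0x1234);

  // Capture the state before tearing the stream down, then resume from it.
  std::map<uint32_t, FakeRtpState> carried_over = first.GetRtpStates();
  FakeSendStream second(carried_over);
  second.SendPacket(0x1234);

  std::cout << "sequence number is now "
            << second.GetRtpStates()[0x1234].sequence_number << std::endl;  // 3
  return 0;
}

In the real code, the map returned by GetRtpStates() is what would be passed back in as suspended_ssrcs when the stream is recreated.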
diff --git a/video/video_send_stream.h b/video/video_send_stream.h
index df65b74e..e1770624 100644
--- a/video/video_send_stream.h
+++ b/video/video_send_stream.h
@@ -11,7 +11,10 @@
#ifndef WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
#define WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
+#include <map>
+
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/transport_adapter.h"
@@ -44,6 +47,7 @@ class VideoSendStream : public webrtc::VideoSendStream,
const VideoSendStream::Config& config,
const std::vector<VideoStream> video_streams,
const void* encoder_settings,
+ const std::map<uint32_t, RtpState>& suspended_ssrcs,
int base_channel,
int start_bitrate);
@@ -65,6 +69,9 @@ class VideoSendStream : public webrtc::VideoSendStream,
// From webrtc::VideoSendStream.
virtual VideoSendStreamInput* Input() OVERRIDE;
+ typedef std::map<uint32_t, RtpState> RtpStateMap;
+ RtpStateMap GetRtpStates() const;
+
protected:
// From SendStatisticsProxy::StreamStatsProvider.
virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) OVERRIDE;
@@ -76,6 +83,7 @@ class VideoSendStream : public webrtc::VideoSendStream,
EncodedFrameCallbackAdapter encoded_frame_proxy_;
const VideoSendStream::Config config_;
const int start_bitrate_bps_;
+ std::map<uint32_t, RtpState> suspended_ssrcs_;
ViEBase* video_engine_base_;
ViECapture* capture_;
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index b1197ef6..08c4fafa 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -137,7 +137,7 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
EXPECT_FALSE(header.extension.hasTransmissionTimeOffset);
EXPECT_TRUE(header.extension.hasAbsoluteSendTime);
@@ -178,7 +178,7 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
EXPECT_TRUE(header.extension.hasTransmissionTimeOffset);
EXPECT_FALSE(header.extension.hasAbsoluteSendTime);
@@ -323,7 +323,7 @@ TEST_F(VideoSendStreamTest, SupportsFec) {
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
// Send lossy receive reports to trigger FEC enabling.
if (send_count_++ % 2 != 0) {
@@ -398,7 +398,7 @@ void VideoSendStreamTest::TestNackRetransmission(
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
// Nack second packet after receiving the third one.
if (++send_count_ == 3) {
@@ -470,7 +470,7 @@ TEST_F(VideoSendStreamTest, RetransmitsNack) {
TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) {
// NACKs over RTX should use a separate SSRC.
- TestNackRetransmission(kSendRtxSsrc, kSendRtxPayloadType);
+ TestNackRetransmission(kSendRtxSsrcs[0], kSendRtxPayloadType);
}
void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
@@ -724,7 +724,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
CriticalSectionScoped lock(crit_.get());
++rtp_count_;
RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
last_sequence_number_ = header.sequenceNumber;
if (test_state_ == kBeforeSuspend) {
@@ -1041,11 +1041,11 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
private:
virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
size_t length) OVERRIDE {
- if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)))
+ if (RtpHeaderParser::IsRtcp(packet, length))
return DELIVERY_OK;
RTPHeader header;
- if (!parser_->Parse(packet, static_cast<int>(length), &header))
+ if (!parser_->Parse(packet, length, &header))
return DELIVERY_PACKET_ERROR;
assert(stream_ != NULL);
VideoSendStream::Stats stats = stream_->GetStats();
@@ -1244,4 +1244,129 @@ I420VideoFrame* CreateI420VideoFrame(int width, int height, uint8_t data) {
return frame;
}
+TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
+ class EncoderStateObserver : public test::SendTest, public VideoEncoder {
+ public:
+ EncoderStateObserver()
+ : SendTest(kDefaultTimeoutMs),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ initialized_(false),
+ callback_registered_(false),
+ num_releases_(0),
+ released_(false) {}
+
+ bool IsReleased() {
+ CriticalSectionScoped lock(crit_.get());
+ return released_;
+ }
+
+ bool IsReadyForEncode() {
+ CriticalSectionScoped lock(crit_.get());
+ return initialized_ && callback_registered_;
+ }
+
+ size_t num_releases() {
+ CriticalSectionScoped lock(crit_.get());
+ return num_releases_;
+ }
+
+ private:
+ virtual int32_t InitEncode(const VideoCodec* codecSettings,
+ int32_t numberOfCores,
+ uint32_t maxPayloadSize) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ EXPECT_FALSE(initialized_);
+ initialized_ = true;
+ released_ = false;
+ return 0;
+ }
+
+ virtual int32_t Encode(
+ const I420VideoFrame& inputImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<VideoFrameType>* frame_types) OVERRIDE {
+ EXPECT_TRUE(IsReadyForEncode());
+
+ observation_complete_->Set();
+ return 0;
+ }
+
+ virtual int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ EXPECT_TRUE(initialized_);
+ callback_registered_ = true;
+ return 0;
+ }
+
+ virtual int32_t Release() OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ EXPECT_TRUE(IsReadyForEncode());
+ EXPECT_FALSE(released_);
+ initialized_ = false;
+ callback_registered_ = false;
+ released_ = true;
+ ++num_releases_;
+ return 0;
+ }
+
+ virtual int32_t SetChannelParameters(uint32_t packetLoss,
+ int rtt) OVERRIDE {
+ EXPECT_TRUE(IsReadyForEncode());
+ return 0;
+ }
+
+ virtual int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) OVERRIDE {
+ EXPECT_TRUE(IsReadyForEncode());
+ return 0;
+ }
+
+ virtual void OnStreamsCreated(
+ VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStream*>& receive_streams) OVERRIDE {
+ // Encoder initialization should be done in stream construction before
+ // starting.
+ EXPECT_TRUE(IsReadyForEncode());
+ stream_ = send_stream;
+ }
+
+ virtual void ModifyConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ std::vector<VideoStream>* video_streams) OVERRIDE {
+ send_config->encoder_settings.encoder = this;
+ video_streams_ = *video_streams;
+ }
+
+ virtual void PerformTest() OVERRIDE {
+ EXPECT_EQ(kEventSignaled, Wait())
+ << "Timed out while waiting for Encode.";
+ EXPECT_EQ(0u, num_releases());
+ stream_->ReconfigureVideoEncoder(video_streams_, NULL);
+ EXPECT_EQ(0u, num_releases());
+ stream_->Stop();
+ // Encoder should not be released before destroying the VideoSendStream.
+ EXPECT_FALSE(IsReleased());
+ EXPECT_TRUE(IsReadyForEncode());
+ stream_->Start();
+ // Sanity check, make sure we still encode frames with this encoder.
+ EXPECT_EQ(kEventSignaled, Wait())
+ << "Timed out while waiting for Encode.";
+ }
+
+ scoped_ptr<CriticalSectionWrapper> crit_;
+ VideoSendStream* stream_;
+ bool initialized_ GUARDED_BY(crit_);
+ bool callback_registered_ GUARDED_BY(crit_);
+ size_t num_releases_ GUARDED_BY(crit_);
+ bool released_ GUARDED_BY(crit_);
+ std::vector<VideoStream> video_streams_;
+ } test_encoder;
+
+ RunBaseTest(&test_encoder);
+
+ EXPECT_TRUE(test_encoder.IsReleased());
+ EXPECT_EQ(1u, test_encoder.num_releases());
+}
+
} // namespace webrtc
diff --git a/video_engine/include/vie_base.h b/video_engine/include/vie_base.h
index 0a528cb1..f4b99ae7 100644
--- a/video_engine/include/vie_base.h
+++ b/video_engine/include/vie_base.h
@@ -43,29 +43,11 @@ class CpuOveruseObserver {
virtual ~CpuOveruseObserver() {}
};
-// Limits on standard deviation for under/overuse.
-#ifdef WEBRTC_ANDROID
-const float kOveruseStdDevMs = 32.0f;
-const float kNormalUseStdDevMs = 27.0f;
-#elif WEBRTC_LINUX
-const float kOveruseStdDevMs = 20.0f;
-const float kNormalUseStdDevMs = 14.0f;
-#elif WEBRTC_MAC
-const float kOveruseStdDevMs = 27.0f;
-const float kNormalUseStdDevMs = 21.0f;
-#elif WEBRTC_WIN
-const float kOveruseStdDevMs = 20.0f;
-const float kNormalUseStdDevMs = 14.0f;
-#else
-const float kOveruseStdDevMs = 30.0f;
-const float kNormalUseStdDevMs = 20.0f;
-#endif
-
struct CpuOveruseOptions {
CpuOveruseOptions()
: enable_capture_jitter_method(true),
- low_capture_jitter_threshold_ms(kNormalUseStdDevMs),
- high_capture_jitter_threshold_ms(kOveruseStdDevMs),
+ low_capture_jitter_threshold_ms(20.0f),
+ high_capture_jitter_threshold_ms(30.0f),
enable_encode_usage_method(false),
low_encode_usage_threshold_percent(60),
high_encode_usage_threshold_percent(90),
@@ -216,26 +198,11 @@ class WEBRTC_DLLEXPORT ViEBase {
CpuOveruseObserver* observer) = 0;
// Sets options for cpu overuse detector.
- // TODO(asapersson): Remove default implementation.
virtual int SetCpuOveruseOptions(int channel,
- const CpuOveruseOptions& options) {
- return -1;
- }
+ const CpuOveruseOptions& options) = 0;
// Gets cpu overuse measures.
- // TODO(asapersson): Remove default implementation.
- virtual int GetCpuOveruseMetrics(int channel,
- CpuOveruseMetrics* metrics) {
- return -1;
- }
- // TODO(asapersson): Remove this function when libjingle has been updated.
- virtual int CpuOveruseMeasures(int channel,
- int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) {
- return -1;
- }
+ virtual int GetCpuOveruseMetrics(int channel, CpuOveruseMetrics* metrics) = 0;
// Specifies the VoiceEngine and VideoEngine channel pair to use for
// audio/video synchronization.
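
Note (illustrative, not part of this patch): with the platform-specific jitter constants removed and SetCpuOveruseOptions() now pure virtual, a caller that wants the old Linux/Windows thresholds back has to set them explicitly. A hedged sketch, assuming a ViEBase* and channel id obtained elsewhere and the usual ViE return convention of 0 on success:

#include "webrtc/video_engine/include/vie_base.h"

// Illustrative only: restores the thresholds this patch removed for
// Linux/Windows by setting them explicitly. |vie_base| and |channel| are
// assumed to have been obtained elsewhere.
int ConfigureOveruseDetection(webrtc::ViEBase* vie_base, int channel) {
  webrtc::CpuOveruseOptions options;  // Defaults: jitter method on, 20/30 ms.
  options.low_capture_jitter_threshold_ms = 14.0f;   // Back to normal below this.
  options.high_capture_jitter_threshold_ms = 20.0f;  // Report overuse above this.
  return vie_base->SetCpuOveruseOptions(channel, options);  // 0 on success, -1 on error.
}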
diff --git a/video_engine/include/vie_rtp_rtcp.h b/video_engine/include/vie_rtp_rtcp.h
index 972ca584..3a565610 100644
--- a/video_engine/include/vie_rtp_rtcp.h
+++ b/video_engine/include/vie_rtp_rtcp.h
@@ -23,6 +23,7 @@
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
namespace webrtc {
@@ -151,6 +152,17 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
virtual int SetStartSequenceNumber(const int video_channel,
unsigned short sequence_number) = 0;
+ // TODO(pbos): Remove default implementation once this has been implemented
+ // in libjingle.
+ virtual void SetRtpStateForSsrc(int video_channel,
+ uint32_t ssrc,
+ const RtpState& rtp_state) {}
+ // TODO(pbos): Remove default implementation once this has been implemented
+ // in libjingle.
+ virtual RtpState GetRtpStateForSsrc(int video_channel, uint32_t ssrc) {
+ return RtpState();
+ }
+
// This function sets the RTCP status for the specified channel.
// Default mode is kRtcpCompound_RFC4585.
virtual int SetRTCPStatus(const int video_channel,
@@ -261,8 +273,7 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
// Enables/disables RTCP Receiver Reference Time Report Block extension/
// DLRR Report Block extension (RFC 3611).
- // TODO(asapersson): Remove default implementation.
- virtual int SetRtcpXrRrtrStatus(int video_channel, bool enable) { return -1; }
+ virtual int SetRtcpXrRrtrStatus(int video_channel, bool enable) = 0;
// Enables transmission smoothening, i.e. packets belonging to the same frame
// will be sent over a longer period of time instead of sending them
diff --git a/video_engine/test/auto_test/automated/vie_network_test.cc b/video_engine/test/auto_test/automated/vie_network_test.cc
index 1e46b3d0..a2d060e7 100644
--- a/video_engine/test/auto_test/automated/vie_network_test.cc
+++ b/video_engine/test/auto_test/automated/vie_network_test.cc
@@ -31,7 +31,7 @@ class RtcpCollectorTransport : public webrtc::Transport {
}
virtual int SendRTCPPacket(int channel, const void* data, int len) {
const uint8_t* buf = static_cast<const uint8_t*>(data);
- webrtc::ModuleRTPUtility::RTPHeaderParser parser(buf, len);
+ webrtc::RtpUtility::RtpHeaderParser parser(buf, len);
if (parser.RTCP()) {
Packet p;
p.channel = channel;
diff --git a/video_engine/test/auto_test/source/vie_autotest.cc b/video_engine/test/auto_test/source/vie_autotest.cc
index 188567cb..fb1a46f2 100644
--- a/video_engine/test/auto_test/source/vie_autotest.cc
+++ b/video_engine/test/auto_test/source/vie_autotest.cc
@@ -101,6 +101,9 @@ void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
case webrtc::kVideoCodecI420:
ViETest::Log("\tcodecType: I420");
break;
+ case webrtc::kVideoCodecH264:
+ ViETest::Log("\tcodecType: H264");
+ break;
case webrtc::kVideoCodecRED:
ViETest::Log("\tcodecType: RED");
break;
diff --git a/video_engine/vie_base_impl.cc b/video_engine/vie_base_impl.cc
index 8b4da279..6ba2fd5f 100644
--- a/video_engine/vie_base_impl.cc
+++ b/video_engine/vie_base_impl.cc
@@ -121,37 +121,6 @@ int ViEBaseImpl::SetCpuOveruseOptions(int video_channel,
return -1;
}
-int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
- int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) {
- ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
- ViEChannel* vie_channel = cs.Channel(video_channel);
- if (!vie_channel) {
- shared_data_.SetLastError(kViEBaseInvalidChannelId);
- return -1;
- }
- ViEEncoder* vie_encoder = cs.Encoder(video_channel);
- assert(vie_encoder);
-
- ViEInputManagerScoped is(*(shared_data_.input_manager()));
- ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
- if (provider) {
- ViECapturer* capturer = is.Capture(provider->Id());
- if (capturer) {
- CpuOveruseMetrics metrics;
- capturer->GetCpuOveruseMetrics(&metrics);
- *capture_jitter_ms = metrics.capture_jitter_ms;
- *avg_encode_time_ms = metrics.avg_encode_time_ms;
- *encode_usage_percent = metrics.encode_usage_percent;
- *capture_queue_delay_ms_per_s = metrics.capture_queue_delay_ms_per_s;
- return 0;
- }
- }
- return -1;
-}
-
int ViEBaseImpl::GetCpuOveruseMetrics(int video_channel,
CpuOveruseMetrics* metrics) {
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
diff --git a/video_engine/vie_base_impl.h b/video_engine/vie_base_impl.h
index d6a046e6..2f847bc6 100644
--- a/video_engine/vie_base_impl.h
+++ b/video_engine/vie_base_impl.h
@@ -37,11 +37,6 @@ class ViEBaseImpl
const CpuOveruseOptions& options);
virtual int GetCpuOveruseMetrics(int channel,
CpuOveruseMetrics* metrics);
- virtual int CpuOveruseMeasures(int channel,
- int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s);
virtual int CreateChannel(int& video_channel); // NOLINT
virtual int CreateChannel(int& video_channel, // NOLINT
const Config* config);
diff --git a/video_engine/vie_channel.cc b/video_engine/vie_channel.cc
index 80d3065b..46622518 100644
--- a/video_engine/vie_channel.cc
+++ b/video_engine/vie_channel.cc
@@ -116,6 +116,7 @@ ViEChannel::ViEChannel(int32_t channel_id,
configuration.remote_bitrate_estimator = remote_bitrate_estimator;
configuration.paced_sender = paced_sender;
configuration.receive_statistics = vie_receiver_.GetReceiveStatistics();
+ configuration.send_bitrate_observer = &send_bitrate_observer_;
rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
vie_receiver_.SetRtpRtcpModule(rtp_rtcp_.get());
@@ -278,6 +279,11 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
rtp_rtcp->SetSendingStatus(rtp_rtcp_->Sending());
rtp_rtcp->SetSendingMediaStatus(rtp_rtcp_->SendingMedia());
+ int mode;
+ uint32_t ssrc;
+ int payload_type;
+ rtp_rtcp_->RTXSendStatus(&mode, &ssrc, &payload_type);
+ rtp_rtcp->SetRTXSendStatus(mode);
simulcast_rtp_rtcp_.push_back(rtp_rtcp);
// Silently ignore error.
@@ -295,7 +301,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
rtp_rtcp->RegisterSendFrameCountObserver(NULL);
rtp_rtcp->RegisterSendChannelRtcpStatisticsCallback(NULL);
rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(NULL);
- rtp_rtcp->RegisterVideoBitrateObserver(NULL);
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
@@ -347,8 +352,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
rtp_rtcp_->GetSendChannelRtcpStatisticsCallback());
rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(
rtp_rtcp_->GetSendChannelRtpStatisticsCallback());
- rtp_rtcp->RegisterVideoBitrateObserver(
- rtp_rtcp_->GetVideoBitrateObserver());
}
// |RegisterSimulcastRtpRtcpModules| resets all old weak pointers and old
// modules can be deleted after this step.
@@ -362,7 +365,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
rtp_rtcp->RegisterSendFrameCountObserver(NULL);
rtp_rtcp->RegisterSendChannelRtcpStatisticsCallback(NULL);
rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(NULL);
- rtp_rtcp->RegisterVideoBitrateObserver(NULL);
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
@@ -888,6 +890,21 @@ int32_t ViEChannel::SetStartSequenceNumber(uint16_t sequence_number) {
return rtp_rtcp_->SetSequenceNumber(sequence_number);
}
+void ViEChannel::SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) {
+ assert(!rtp_rtcp_->Sending());
+ default_rtp_rtcp_->SetRtpStateForSsrc(ssrc, rtp_state);
+}
+
+RtpState ViEChannel::GetRtpStateForSsrc(uint32_t ssrc) {
+ assert(!rtp_rtcp_->Sending());
+
+ RtpState rtp_state;
+ if (!default_rtp_rtcp_->GetRtpStateForSsrc(ssrc, &rtp_state)) {
+ LOG(LS_ERROR) << "Couldn't get RTP state for ssrc: " << ssrc;
+ }
+ return rtp_state;
+}
+
int32_t ViEChannel::SetRTCPCName(const char rtcp_cname[]) {
if (rtp_rtcp_->Sending()) {
return -1;
@@ -1192,13 +1209,7 @@ bool ViEChannel::GetSendSideDelay(int* avg_send_delay,
void ViEChannel::RegisterSendBitrateObserver(
BitrateStatisticsObserver* observer) {
- rtp_rtcp_->RegisterVideoBitrateObserver(observer);
- CriticalSectionScoped cs(rtp_rtcp_cs_.get());
- for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
- it != simulcast_rtp_rtcp_.end();
- it++) {
- (*it)->RegisterVideoBitrateObserver(observer);
- }
+ send_bitrate_observer_.Set(observer);
}
void ViEChannel::GetReceiveBandwidthEstimatorStats(
@@ -1516,7 +1527,6 @@ void ViEChannel::ReserveRtpRtcpModules(size_t num_modules) {
rtp_rtcp->RegisterSendFrameCountObserver(NULL);
rtp_rtcp->RegisterSendChannelRtcpStatisticsCallback(NULL);
rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(NULL);
- rtp_rtcp->RegisterVideoBitrateObserver(NULL);
removed_rtp_rtcp_.push_back(rtp_rtcp);
}
}
diff --git a/video_engine/vie_channel.h b/video_engine/vie_channel.h
index c76d1298..39f9b75c 100644
--- a/video_engine/vie_channel.h
+++ b/video_engine/vie_channel.h
@@ -16,6 +16,7 @@
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/typedefs.h"
@@ -152,6 +153,9 @@ class ViEChannel
// Sets the starting sequence number, must be called before StartSend.
int32_t SetStartSequenceNumber(uint16_t sequence_number);
+ void SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state);
+ RtpState GetRtpStateForSsrc(uint32_t ssrc);
+
// Sets the CName for the outgoing stream on the channel.
int32_t SetRTCPCName(const char rtcp_cname[]);
@@ -379,6 +383,43 @@ class ViEChannel
int GetRequiredNackListSize(int target_delay_ms);
void SetRtxSendStatus(bool enable);
+  // ViEChannel exposes methods that allow observers and callbacks to be
+  // modified after construction. Such an API style is cumbersome to
+  // implement and maintain at every level compared to setting them only at
+  // construction, so this class instantiates its children with a wrapper
+  // that can be modified at a later time.
+ template <class T>
+ class RegisterableCallback : public T {
+ public:
+ RegisterableCallback()
+ : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ callback_(NULL) {}
+
+ void Set(T* callback) {
+ CriticalSectionScoped cs(critsect_.get());
+ callback_ = callback;
+ }
+
+ protected:
+    // Note: this should be implemented with an RW-lock to allow simultaneous
+    // calls into the callback. However, that doesn't seem to be needed for
+    // the current type of callbacks covered by this class.
+ scoped_ptr<CriticalSectionWrapper> critsect_;
+ T* callback_ GUARDED_BY(critsect_);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(RegisterableCallback);
+ };
+
+ class : public RegisterableCallback<BitrateStatisticsObserver> {
+ virtual void Notify(const BitrateStatistics& stats, uint32_t ssrc) {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->Notify(stats, ssrc);
+ }
+ }
+ send_bitrate_observer_;
+
int32_t channel_id_;
int32_t engine_id_;
uint32_t number_of_cores_;
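
Note (illustrative, not part of this patch): the RegisterableCallback wrapper above gives ViEChannel's children one stable observer pointer at construction while the real callback behind it can be registered or cleared later. A minimal self-contained sketch of the same pattern, with std::mutex standing in for CriticalSectionWrapper and a toy observer interface standing in for BitrateStatisticsObserver:

#include <cstdint>
#include <iostream>
#include <mutex>

// Toy stand-in for an observer interface such as BitrateStatisticsObserver.
class BitrateObserver {
 public:
  virtual ~BitrateObserver() {}
  virtual void Notify(uint32_t bitrate_bps) = 0;
};

// Forwarding wrapper: children hold a pointer to this object for their whole
// lifetime, while the callback behind it can be swapped at any time.
class RegisterableBitrateObserver : public BitrateObserver {
 public:
  void Set(BitrateObserver* callback) {
    std::lock_guard<std::mutex> lock(mutex_);
    callback_ = callback;
  }

  virtual void Notify(uint32_t bitrate_bps) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (callback_)
      callback_->Notify(bitrate_bps);
  }

 private:
  std::mutex mutex_;
  BitrateObserver* callback_ = nullptr;
};

class PrintingObserver : public BitrateObserver {
 public:
  virtual void Notify(uint32_t bitrate_bps) {
    std::cout << "bitrate: " << bitrate_bps << " bps" << std::endl;
  }
};

int main() {
  RegisterableBitrateObserver wrapper;
  wrapper.Notify(100000);  // No callback registered yet: dropped silently.

  PrintingObserver printer;
  wrapper.Set(&printer);   // Registered after construction, as
                           // ViEChannel::RegisterSendBitrateObserver() does.
  wrapper.Notify(200000);  // Forwarded to the printer.
  return 0;
}

The trade-off matches the class comment: one extra indirection and lock per notification, in exchange for not having to re-register observers on every RTP module the channel creates.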
diff --git a/video_engine/vie_codec_impl.cc b/video_engine/vie_codec_impl.cc
index 3ba56de5..050958e2 100644
--- a/video_engine/vie_codec_impl.cc
+++ b/video_engine/vie_codec_impl.cc
@@ -69,6 +69,18 @@ static void LogCodec(const VideoCodec& codec) {
<< ", qp max "
<< codec.simulcastStream[idx].qpMax;
}
+ } else if (codec.codecType == kVideoCodecH264) {
+ LOG(LS_INFO) << "H264 specific settings";
+ LOG(LS_INFO) << "profile: "
+ << codec.codecSpecific.H264.profile
+ << ", framedropping: "
+ << codec.codecSpecific.H264.frameDroppingOn
+ << ", keyFrameInterval: "
+ << codec.codecSpecific.H264.keyFrameInterval
+ << ", spslen: "
+ << codec.codecSpecific.H264.spsLen
+ << ", ppslen: "
+ << codec.codecSpecific.H264.ppsLen;
}
}
@@ -629,7 +641,9 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
} else if ((video_codec.codecType == kVideoCodecVP8 &&
strncmp(video_codec.plName, "VP8", 4) == 0) ||
(video_codec.codecType == kVideoCodecI420 &&
- strncmp(video_codec.plName, "I420", 4) == 0)) {
+ strncmp(video_codec.plName, "I420", 4) == 0) ||
+ (video_codec.codecType == kVideoCodecH264 &&
+ strncmp(video_codec.plName, "H264", 4) == 0)) {
// OK.
} else if (video_codec.codecType != kVideoCodecGeneric) {
LOG(LS_ERROR) << "Codec type and name mismatch.";
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index a009c06a..fd3f0d78 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -23,6 +23,7 @@
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
@@ -34,16 +35,6 @@
namespace webrtc {
-// Pace in kbits/s until we receive first estimate.
-static const int kInitialPace = 2000;
-
-// Pacing-rate relative to our target send rate.
-// Multiplicative factor that is applied to the target bitrate to calculate the
-// number of bytes that can be transmitted per interval.
-// Increasing this factor will result in lower delays in cases of bitrate
-// overshoots from the encoder.
-static const float kPaceMultiplier = 2.5f;
-
// Margin on when we pause the encoder when the pacing buffer overflows relative
// to the configured buffer delay.
static const float kEncoderPausePacerMargin = 2.0f;
@@ -172,7 +163,8 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
bitrate_observer_.reset(new ViEBitrateObserver(this));
pacing_callback_.reset(new ViEPacedSenderCallback(this));
paced_sender_.reset(
- new PacedSender(pacing_callback_.get(), kInitialPace, kPaceMultiplier));
+ new PacedSender(Clock::GetRealTimeClock(), pacing_callback_.get(),
+ PacedSender::kDefaultInitialPaceKbps, 0));
}
bool ViEEncoder::Init() {
@@ -383,8 +375,9 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
if (pad_up_to_bitrate_kbps < min_transmit_bitrate_kbps_)
pad_up_to_bitrate_kbps = min_transmit_bitrate_kbps_;
- paced_sender_->UpdateBitrate(kPaceMultiplier * video_codec.startBitrate,
- pad_up_to_bitrate_kbps);
+ paced_sender_->UpdateBitrate(
+ PacedSender::kDefaultPaceMultiplier * video_codec.startBitrate,
+ pad_up_to_bitrate_kbps);
return 0;
}
@@ -894,8 +887,9 @@ void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
if (pad_up_to_bitrate_kbps > bitrate_kbps)
pad_up_to_bitrate_kbps = bitrate_kbps;
- paced_sender_->UpdateBitrate(kPaceMultiplier * bitrate_kbps,
- pad_up_to_bitrate_kbps);
+ paced_sender_->UpdateBitrate(
+ PacedSender::kDefaultPaceMultiplier * bitrate_kbps,
+ pad_up_to_bitrate_kbps);
default_rtp_rtcp_->SetTargetSendBitrate(stream_bitrates);
if (video_suspended_ == video_is_suspended)
return;
diff --git a/video_engine/vie_rtp_rtcp_impl.cc b/video_engine/vie_rtp_rtcp_impl.cc
index 53610b4a..04f5e9b0 100644
--- a/video_engine/vie_rtp_rtcp_impl.cc
+++ b/video_engine/vie_rtp_rtcp_impl.cc
@@ -256,6 +256,30 @@ int ViERTP_RTCPImpl::SetStartSequenceNumber(const int video_channel,
return 0;
}
+void ViERTP_RTCPImpl::SetRtpStateForSsrc(int video_channel,
+ uint32_t ssrc,
+ const RtpState& rtp_state) {
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel)
+ return;
+
+ if (vie_channel->Sending()) {
+ LOG_F(LS_ERROR) << "channel " << video_channel << " is already sending.";
+ return;
+ }
+ vie_channel->SetRtpStateForSsrc(ssrc, rtp_state);
+}
+
+RtpState ViERTP_RTCPImpl::GetRtpStateForSsrc(int video_channel, uint32_t ssrc) {
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel)
+ return RtpState();
+
+ return vie_channel->GetRtpStateForSsrc(ssrc);
+}
+
int ViERTP_RTCPImpl::SetRTCPStatus(const int video_channel,
const ViERTCPMode rtcp_mode) {
LOG_F(LS_INFO) << "channel: " << video_channel
diff --git a/video_engine/vie_rtp_rtcp_impl.h b/video_engine/vie_rtp_rtcp_impl.h
index 5eec0efe..4afe1c58 100644
--- a/video_engine/vie_rtp_rtcp_impl.h
+++ b/video_engine/vie_rtp_rtcp_impl.h
@@ -46,6 +46,11 @@ class ViERTP_RTCPImpl
const uint8_t payload_type);
virtual int SetStartSequenceNumber(const int video_channel,
uint16_t sequence_number);
+ virtual void SetRtpStateForSsrc(int video_channel,
+ uint32_t ssrc,
+ const RtpState& rtp_state) OVERRIDE;
+ virtual RtpState GetRtpStateForSsrc(int video_channel,
+ uint32_t ssrc) OVERRIDE;
virtual int SetRTCPStatus(const int video_channel,
const ViERTCPMode rtcp_mode);
virtual int GetRTCPStatus(const int video_channel,
diff --git a/video_engine/vie_sender.cc b/video_engine/vie_sender.cc
index 349bc72f..28bf3903 100644
--- a/video_engine/vie_sender.cc
+++ b/video_engine/vie_sender.cc
@@ -11,6 +11,7 @@
#include "webrtc/video_engine/vie_sender.h"
#include <assert.h>
+#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
diff --git a/video_receive_stream.h b/video_receive_stream.h
index 2584e204..b1633f93 100644
--- a/video_receive_stream.h
+++ b/video_receive_stream.h
@@ -90,7 +90,7 @@ class VideoReceiveStream {
: remote_ssrc(0),
local_ssrc(0),
rtcp_mode(newapi::kRtcpReducedSize),
- remb(false) {}
+ remb(true) {}
// Synchronization source (stream identifier) to be received.
uint32_t remote_ssrc;
diff --git a/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc b/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc
index cf33adf1..a678b13a 100644
--- a/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc
+++ b/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc
@@ -31,7 +31,9 @@ class ExtensionVerifyTransport : public webrtc::Transport {
virtual int SendPacket(int channel, const void* data, int len) {
webrtc::RTPHeader header;
- if (parser_->Parse(static_cast<const uint8_t*>(data), len, &header)) {
+ if (parser_->Parse(reinterpret_cast<const uint8_t*>(data),
+ static_cast<size_t>(len),
+ &header)) {
bool ok = true;
if (audio_level_id_ >= 0 &&
!header.extension.hasAudioLevel) {
diff --git a/webrtc_examples.gyp b/webrtc_examples.gyp
index 56a0e9ef..e8d1b982 100644
--- a/webrtc_examples.gyp
+++ b/webrtc_examples.gyp
@@ -15,7 +15,6 @@
'type': 'loadable_module',
'dependencies': [
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
- '<(webrtc_root)/modules/modules.gyp:*',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
diff --git a/webrtc_tests.gypi b/webrtc_tests.gypi
index 85a7e733..78b199f0 100644
--- a/webrtc_tests.gypi
+++ b/webrtc_tests.gypi
@@ -79,6 +79,7 @@
'video/call_perf_tests.cc',
'video/full_stack.cc',
'video/rampup_tests.cc',
+ 'video/rampup_tests.h',
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',